#Import Libraries
import os
import sys
import glob
import pickle
import sklearn
import librosa
import warnings
import collections
import numpy as np
import pandas as pd
import seaborn as sns
import librosa.display
import tensorflow as tf
from scipy import stats
from tqdm import notebook
from tensorflow import keras
import IPython.display as ipd
import scipy.io.wavfile as wav
import matplotlib.pyplot as plt
warnings.filterwarnings("ignore")
from sklearn import preprocessing
from prettytable import PrettyTable
from matplotlib.pyplot import figure
tf.config.run_functions_eagerly(True)
from tensorflow.keras.layers import GRU
from astropy.table import Table, Column
from python_speech_features import mfcc
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import Conv1D
from tensorflow.keras.layers import Flatten
from tensorflow.keras.models import Sequential
from sklearn.preprocessing import MinMaxScaler
from sklearn.neural_network import MLPClassifier
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.layers import Bidirectional
from tensorflow.keras.layers import TimeDistributed
from imblearn.over_sampling import RandomOverSampler
from sklearn.model_selection import train_test_split
from tensorflow.keras.callbacks import ReduceLROnPlateau, ModelCheckpoint
from sklearn.metrics import accuracy_score, confusion_matrix, balanced_accuracy_score, classification_report
from tensorflow.keras.layers import Dense, Conv1D, MaxPooling1D, Flatten, Dropout, BatchNormalization, LSTM, GlobalAveragePooling1D
# Load Sample Data: SAVEE (all-male British English corpus).
print("\nSample Data:")
print("============\n")
path = 'Datasets/SAVEE Database'
# Recursively collect every .wav path under the dataset root.
sample_data_1 = [os.path.join(dp, f) for dp, dn, filenames in os.walk(path)
                 for f in filenames if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
# Preview every 100th clip (5 total): playable widget plus waveform plot.
for i in range(5):
    print("Audio: ")
    ipd.display(ipd.Audio(sample_data_1[i*100]))
    data, sampling_rate = librosa.load(sample_data_1[i*100])
    plt.figure(figsize=(10, 2), facecolor="Green")
    # NOTE(review): waveplot was removed in librosa 0.10 — on newer librosa
    # use librosa.display.waveshow(data, sr=sampling_rate) instead.
    librosa.display.waveplot(data, sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# Load Sample Data: EmoDB (Berlin database of emotional German speech).
print("\nSample Data:")
print("============\n")
path = 'Datasets/EmoDB'
# Recursively collect every .wav path under the dataset root.
sample_data_2 = [os.path.join(dp, f) for dp, dn, filenames in os.walk(path)
                 for f in filenames if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
# Preview every 100th clip (5 total): playable widget plus waveform plot.
for i in range(5):
    print("Audio: ")
    ipd.display(ipd.Audio(sample_data_2[i*100]))
    data, sampling_rate = librosa.load(sample_data_2[i*100])
    plt.figure(figsize=(10, 2), facecolor="blue")
    # NOTE(review): waveplot was removed in librosa 0.10 — on newer librosa
    # use librosa.display.waveshow(data, sr=sampling_rate) instead.
    librosa.display.waveplot(data, sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# Load Sample Data: CREMA-D (crowd-sourced emotional multimodal actors).
print("\nSample Data:")
print("============\n")
path = 'Datasets/CREMA-D'
# Recursively collect every .wav path under the dataset root.
sample_data_3 = [os.path.join(dp, f) for dp, dn, filenames in os.walk(path)
                 for f in filenames if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
# Preview every 100th clip (5 total): playable widget plus waveform plot.
for i in range(5):
    print("Audio: ")
    ipd.display(ipd.Audio(sample_data_3[i*100]))
    data, sampling_rate = librosa.load(sample_data_3[i*100])
    plt.figure(figsize=(10, 2), facecolor="brown")
    # NOTE(review): waveplot was removed in librosa 0.10 — on newer librosa
    # use librosa.display.waveshow(data, sr=sampling_rate) instead.
    librosa.display.waveplot(data, sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# Load Sample Data: TESS (Toronto emotional speech set).
print("\nSample Data:")
print("============\n")
path = 'Datasets/TESS'
# Recursively collect every .wav path under the dataset root.
sample_data_4 = [os.path.join(dp, f) for dp, dn, filenames in os.walk(path)
                 for f in filenames if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
# Preview every 100th clip (5 total): playable widget plus waveform plot.
for i in range(5):
    print("Audio: ")
    ipd.display(ipd.Audio(sample_data_4[i*100]))
    data, sampling_rate = librosa.load(sample_data_4[i*100])
    plt.figure(figsize=(10, 2), facecolor="gray")
    # NOTE(review): waveplot was removed in librosa 0.10 — on newer librosa
    # use librosa.display.waveshow(data, sr=sampling_rate) instead.
    librosa.display.waveplot(data, sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# Load Sample Data: RAVDESS (Ryerson audio-visual database of emotional speech).
print("\nSample Data:")
print("============\n")
path = 'Datasets/RAVDESS'
# Recursively collect every .wav path under the dataset root.
sample_data_5 = [os.path.join(dp, f) for dp, dn, filenames in os.walk(path)
                 for f in filenames if os.path.splitext(f)[1] == '.wav']
print("\nDisplaying Some Instances from Sample Data: ")
print("===========================================\n")
# Preview every 100th clip (5 total): playable widget plus waveform plot.
for i in range(5):
    print("Audio: ")
    ipd.display(ipd.Audio(sample_data_5[i*100]))
    data, sampling_rate = librosa.load(sample_data_5[i*100])
    plt.figure(figsize=(10, 2), facecolor="yellow")
    # NOTE(review): waveplot was removed in librosa 0.10 — on newer librosa
    # use librosa.display.waveshow(data, sr=sampling_rate) instead.
    librosa.display.waveplot(data, sampling_rate)
Sample Data: ============ Displaying Some Instances from Sample Data: =========================================== Audio:
Audio:
Audio:
Audio:
Audio:
# Extract MFCC features for every SAVEE clip and label each frame with the
# clip's emotion (parsed from the filename) and the speaker gender.
# SAVEE filename emotion code -> label; anything unrecognized becomes "Other".
_savee_emotion = {'a': "Anger", 'd': "Disgust", 'f': "Fear", 'h': "Happiness",
                  'n': "Neutral", 'sa': "Sadness", 'su': "Surprise"}
savee_frames = []
for i in notebook.tqdm(range(len(sample_data_1))):
    data, sampling_rate = librosa.load(sample_data_1[i])
    # Get MFCC Features (winlen is in seconds; nfft sized to cover the window).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame: one row per analysis frame, 13 coefficients.
    mfcc_df = pd.DataFrame(mfcc_features)
    # NOTE(review): fixed character offsets assume the exact
    # 'Datasets/SAVEE Database/...' path layout — confirm before reuse.
    mfcc_df["Emotion"] = _savee_emotion.get(sample_data_1[i][27:-6], "Other")
    mfcc_df["Gender"] = "Male"  # all four SAVEE speakers are male
    savee_frames.append(mfcc_df)
# DataFrame.append was removed in pandas 2.0; collecting frames in a list and
# concatenating once also avoids quadratic copying inside the loop.
sample_data_mfcc_features_1 = pd.concat(savee_frames) if savee_frames else pd.DataFrame()
# Save to CSV
sample_data_mfcc_features_1.to_csv('Audio Features/sample_data_mfcc_features_1.csv', index=False, mode='w', header=True)
# Load Features
features_1 = pd.read_csv('Audio Features/sample_data_mfcc_features_1.csv')
features_1
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 3.203335 | 13.795782 | -19.908137 | 23.113571 | -41.785224 | 5.834210 | -52.924076 | -2.088703 | -14.617876 | 4.936264 | -15.795315 | 11.496223 | -4.460716 | Anger | Male |
| 1 | 3.446474 | 23.732635 | -30.598307 | 4.388828 | -28.666846 | 13.383600 | -52.968102 | 4.408833 | 2.814173 | -10.741523 | -14.331456 | 11.751246 | -8.396209 | Anger | Male |
| 2 | 3.476206 | 17.949951 | -30.643702 | 4.392290 | -35.739255 | -8.915932 | -46.324606 | 6.564549 | -7.061853 | -16.553248 | -16.938792 | 8.475676 | -20.742700 | Anger | Male |
| 3 | 3.558089 | 18.276150 | -20.346620 | -1.583971 | -30.147299 | 0.963494 | -54.361284 | -9.811369 | -10.147195 | -11.144327 | -1.306936 | 7.505639 | 0.572708 | Anger | Male |
| 4 | 3.764346 | 18.764859 | -24.920929 | 1.265585 | -34.814864 | -3.281257 | -50.514474 | -1.195924 | -6.026913 | -8.751210 | -12.911233 | 8.015905 | -6.634611 | Anger | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 475 | -2.891536 | 9.977441 | -8.494620 | 20.642597 | -43.986634 | -5.818822 | -42.747704 | 1.623718 | -5.636403 | -5.261166 | -19.011852 | 10.283520 | -0.037808 | Surprise | Male |
| 476 | -2.373482 | 13.664592 | -14.080212 | 22.486546 | -38.038503 | -3.001456 | -38.062197 | -3.495917 | -15.125975 | -11.673487 | -15.574197 | -0.432495 | -6.982044 | Surprise | Male |
| 477 | -2.244675 | 21.429362 | -11.647796 | 9.177692 | -29.520397 | -15.366208 | -43.935295 | 10.374315 | -28.431255 | -12.655181 | -9.804251 | -1.886984 | 4.831945 | Surprise | Male |
| 478 | -1.600036 | 13.877167 | -13.671807 | 13.773882 | -43.390877 | -1.317022 | -45.268181 | -2.391161 | -21.976652 | -9.785898 | -4.528661 | 14.398901 | -6.945704 | Surprise | Male |
| 479 | -1.418079 | 12.886012 | -9.240951 | 11.856341 | -56.535884 | 2.798735 | -38.455837 | 0.368887 | -21.125617 | -7.183570 | -11.136310 | 10.025685 | -2.063207 | Surprise | Male |
480 rows × 15 columns
# Extract MFCC features for every EmoDB clip; emotion and speaker identity
# are encoded at fixed positions of the filename.
# EmoDB emotion letter -> label; anything unrecognized becomes "Other".
_emodb_emotion = {'W': "Anger", 'E': "Disgust", 'A': "Fear", 'F': "Happiness",
                  'N': "Neutral", 'T': "Sadness", 'L': "Boredom"}
# Speaker ids of the male EmoDB actors (set for O(1) membership tests).
_emodb_male = {'03', '10', '11', '12', '15'}
emodb_frames = []
for i in notebook.tqdm(range(len(sample_data_2))):
    data, sampling_rate = librosa.load(sample_data_2[i])
    # Get MFCC Features (winlen is in seconds; nfft sized to cover the window).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame: one row per analysis frame, 13 coefficients.
    mfcc_df = pd.DataFrame(mfcc_features)
    # NOTE(review): fixed character offsets assume the exact
    # 'Datasets/EmoDB/...' path layout — confirm before reuse.
    mfcc_df["Emotion"] = _emodb_emotion.get(sample_data_2[i][24:25], "Other")
    mfcc_df["Gender"] = "Male" if sample_data_2[i][19:21] in _emodb_male else "Female"
    emodb_frames.append(mfcc_df)
# DataFrame.append was removed in pandas 2.0; collecting frames in a list and
# concatenating once also avoids quadratic copying inside the loop.
sample_data_mfcc_features_2 = pd.concat(emodb_frames) if emodb_frames else pd.DataFrame()
# Save to CSV
sample_data_mfcc_features_2.to_csv('Audio Features/sample_data_mfcc_features_2.csv', index=False, mode='w', header=True)
# Load Features
features_2 = pd.read_csv('Audio Features/sample_data_mfcc_features_2.csv')
features_2
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2.581027 | 15.767412 | -47.700512 | 35.579753 | -70.270488 | 46.800920 | -58.114879 | 37.216890 | -49.564047 | 31.066088 | -17.529903 | 3.279682 | -12.838690 | Happiness | Male |
| 1 | 2.795378 | 24.843764 | -51.506450 | 49.454440 | -72.968111 | 55.298546 | -74.268759 | 41.864338 | -59.045060 | 38.514404 | 0.004737 | 10.388288 | -14.279588 | Neutral | Male |
| 2 | 4.170786 | 14.397566 | -71.067858 | 40.482368 | -84.346462 | 58.019575 | -81.388417 | 53.770739 | -75.670880 | 40.587622 | -35.743369 | 21.891265 | -25.008990 | Anger | Male |
| 3 | 3.337494 | 16.282372 | -50.880033 | 31.990281 | -85.102191 | 50.261715 | -84.640713 | 46.940764 | -82.094347 | 32.629892 | -49.312384 | 7.776564 | -22.946343 | Happiness | Male |
| 4 | 2.707667 | 24.334550 | -45.717779 | 53.691805 | -71.204936 | 54.309667 | -80.084345 | 44.830838 | -61.581425 | 38.328134 | -23.163505 | 17.898031 | -5.970665 | Neutral | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 530 | 3.863628 | 19.842292 | -52.308186 | 48.280323 | -72.358978 | 54.636782 | -87.941838 | 44.757594 | -59.136477 | 49.136846 | -35.821170 | 15.900585 | -32.775497 | Boredom | Female |
| 531 | 4.198406 | 4.509584 | -34.055637 | 48.250139 | -48.483522 | 48.624353 | -72.094906 | 19.418505 | -74.377215 | 30.325926 | -38.960475 | -1.243505 | -27.763525 | Sadness | Female |
| 532 | 3.450846 | 1.809250 | -26.119918 | 58.439039 | -41.793259 | 43.821996 | -58.959182 | 33.575929 | -56.189126 | 32.773332 | -32.477892 | 8.302056 | -14.578023 | Sadness | Female |
| 533 | 3.457763 | 7.503398 | -88.067662 | 40.491302 | -88.680502 | 46.283885 | -81.724622 | 27.269826 | -67.302466 | 10.171340 | -50.524035 | 11.235342 | -32.179386 | Anger | Female |
| 534 | 4.445711 | 8.212223 | -81.494063 | 45.600778 | -89.101499 | 32.327346 | -92.760920 | 36.413223 | -79.412132 | 31.038278 | -60.340300 | 26.689882 | -26.373804 | Anger | Female |
535 rows × 15 columns
# Extract MFCC features for every CREMA-D clip; emotion comes from the 3-letter
# code in the filename, gender from the leading actor id.
# CREMA-D emotion code -> label; anything unrecognized becomes "Other".
_crema_emotion = {'ANG': "Anger", 'DIS': "Disgust", 'FEA': "Fear",
                  'HAP': "Happiness", 'NEU': "Neutral", 'SAD': "Sadness"}
# Actor ids of the female CREMA-D speakers (set for O(1) membership tests).
female = {1002, 1003, 1004, 1006, 1007, 1008, 1009, 1010, 1012, 1013, 1018,
          1020, 1021, 1024, 1025, 1028, 1029, 1030, 1037, 1043, 1046, 1047,
          1049, 1052, 1053, 1054, 1055, 1056, 1058, 1060, 1061, 1063, 1072,
          1073, 1074, 1075, 1076, 1078, 1079, 1082, 1084, 1089, 1091}
crema_frames = []
for i in notebook.tqdm(range(len(sample_data_3))):
    data, sampling_rate = librosa.load(sample_data_3[i])
    # Get MFCC Features (winlen is in seconds; nfft sized to cover the window).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame: one row per analysis frame, 13 coefficients.
    mfcc_df = pd.DataFrame(mfcc_features)
    # NOTE(review): fixed character offsets assume the exact
    # 'Datasets/CREMA-D/...' path layout — confirm before reuse.
    mfcc_df["Emotion"] = _crema_emotion.get(sample_data_3[i][26:29], "Other")
    actor_id = int(sample_data_3[i][17:].split('_')[0])
    mfcc_df["Gender"] = "Female" if actor_id in female else "Male"
    crema_frames.append(mfcc_df)
# DataFrame.append was removed in pandas 2.0; collecting frames in a list and
# concatenating once also avoids quadratic copying inside the loop.
sample_data_mfcc_features_3 = pd.concat(crema_frames) if crema_frames else pd.DataFrame()
# Save to CSV
sample_data_mfcc_features_3.to_csv('Audio Features/sample_data_mfcc_features_3.csv', index=False, mode='w', header=True)
# Load Features
features_3 = pd.read_csv('Audio Features/sample_data_mfcc_features_3.csv')
features_3
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2.368902 | 23.902528 | -65.345819 | 44.065375 | -74.662445 | 51.996950 | -88.516855 | 53.855236 | -78.723548 | 69.285800 | -59.639412 | 39.177042 | -23.609966 | Anger | Male |
| 1 | 0.307114 | 13.163935 | -48.153023 | 33.389668 | -49.739224 | 34.304775 | -69.983706 | 35.961724 | -49.892039 | 39.105544 | -31.849578 | 27.108298 | 2.069042 | Disgust | Male |
| 2 | 2.603431 | 10.167095 | -47.088421 | 13.140881 | -51.376008 | 24.318749 | -71.462424 | 20.749077 | -50.124086 | 37.938860 | -31.598722 | 8.348972 | -5.282255 | Fear | Male |
| 3 | 2.140128 | 11.848147 | -47.720734 | 23.389854 | -52.047988 | 23.094227 | -65.452202 | 24.289967 | -50.372034 | 35.510180 | -35.682490 | 11.530250 | 8.193065 | Happiness | Male |
| 4 | 0.473232 | 12.135419 | -43.704434 | 31.896648 | -43.032738 | 21.278558 | -59.340704 | 9.792648 | -45.896816 | 39.344912 | -31.064415 | 19.842668 | 10.652224 | Neutral | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 7437 | -0.569248 | 7.090084 | -21.821569 | 21.218883 | -70.987375 | 58.362293 | -53.937357 | 59.089649 | -57.824928 | 26.901631 | -25.225629 | 5.655281 | -13.951756 | Disgust | Female |
| 7438 | 0.275142 | -7.106827 | -12.841243 | 33.026974 | -53.708565 | 49.734721 | -53.718731 | 28.586449 | -38.315850 | 26.497463 | -24.172343 | -5.992458 | -15.062148 | Fear | Female |
| 7439 | 0.081269 | 6.700789 | -24.036068 | 18.961563 | -59.892895 | 46.793140 | -65.963296 | 37.240255 | -53.570069 | 28.635201 | -29.254545 | -2.764532 | -8.967688 | Happiness | Female |
| 7440 | -0.233033 | 8.786678 | -29.184771 | 14.561491 | -73.676943 | 47.898016 | -35.411657 | 53.926727 | -62.481850 | 23.092826 | -16.729014 | 1.414838 | -16.098669 | Neutral | Female |
| 7441 | -0.907600 | 5.432492 | -20.481048 | 23.180579 | -67.214288 | 55.307229 | -53.056532 | 48.972602 | -48.267778 | 34.530610 | -17.210164 | -2.257076 | -11.995452 | Sadness | Female |
7442 rows × 15 columns
# Extract MFCC features for every TESS clip; the emotion is the last
# underscore-separated token of the filename (extension stripped).
# TESS emotion token -> label; anything unrecognized becomes "Other".
_tess_emotion = {'angry': "Anger", 'disgust': "Disgust", 'fear': "Fear",
                 'happy': "Happiness", 'neutral': "Neutral", 'ps': "Surprise",
                 'sad': "Sadness"}
tess_frames = []
for i in notebook.tqdm(range(len(sample_data_4))):
    data, sampling_rate = librosa.load(sample_data_4[i])
    # Get MFCC Features (winlen is in seconds; nfft sized to cover the window).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame: one row per analysis frame, 13 coefficients.
    mfcc_df = pd.DataFrame(mfcc_features)
    mfcc_df["Emotion"] = _tess_emotion.get(sample_data_4[i].split('_')[2][:-4], "Other")
    # BUG FIX: TESS has exactly two speakers — OAF (older actress) and
    # YAF (younger actress) — both female. The original labeled the OAF
    # clips "Male", which is incorrect.
    mfcc_df["Gender"] = "Female"
    tess_frames.append(mfcc_df)
# DataFrame.append was removed in pandas 2.0; collecting frames in a list and
# concatenating once also avoids quadratic copying inside the loop.
sample_data_mfcc_features_4 = pd.concat(tess_frames) if tess_frames else pd.DataFrame()
# Save to CSV
sample_data_mfcc_features_4.to_csv('Audio Features/sample_data_mfcc_features_4.csv', index=False, mode='w', header=True)
# Load Features
features_4 = pd.read_csv('Audio Features/sample_data_mfcc_features_4.csv')
features_4
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 1.220228 | -6.288021 | -23.058824 | -42.426226 | -32.405139 | -2.121633 | -32.409469 | -19.628282 | -12.538443 | -8.152727 | 8.038784 | 9.805581 | -8.588909 | Anger | Male |
| 1 | -0.977203 | -4.524827 | -8.214732 | -29.952360 | -8.692126 | -8.750833 | -22.322727 | -14.788581 | -4.259685 | -10.710336 | -10.794590 | -6.866066 | -7.855454 | Disgust | Male |
| 2 | 0.054579 | -3.640480 | -8.949627 | -38.282103 | -26.989143 | -16.127692 | -31.411542 | -17.400726 | -23.480958 | -4.422475 | -20.796203 | -9.274272 | -12.435063 | Fear | Male |
| 3 | -0.666131 | -4.405894 | -1.385126 | -29.719358 | -15.141410 | 0.250221 | -31.969870 | -16.251581 | -9.591181 | -11.897906 | -3.236711 | -10.320902 | -0.489421 | Happiness | Male |
| 4 | -2.182739 | -4.278151 | -1.558268 | -21.337291 | -21.234993 | -10.585688 | -17.260342 | -23.917076 | -15.028336 | -1.602557 | -13.839702 | -1.707235 | -11.335032 | Neutral | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 2795 | 1.889912 | -9.972459 | -19.383733 | -16.514340 | 3.237600 | -40.164680 | -21.195187 | -36.141423 | -33.244664 | -11.505766 | -13.660636 | 4.744601 | -3.159517 | Fear | Female |
| 2796 | 3.691365 | -16.734092 | -30.044690 | -8.298291 | -18.335224 | -40.469660 | -37.680667 | -54.758465 | 0.051854 | -16.442842 | -20.674230 | 6.932490 | -33.305323 | Happiness | Female |
| 2797 | 0.734736 | -11.863582 | -6.351481 | -8.360497 | -4.854154 | -32.655983 | -13.512118 | -33.856513 | -16.572519 | -2.119042 | -19.188326 | -4.721083 | -16.395590 | Neutral | Female |
| 2798 | 0.830298 | -14.009006 | -14.546528 | -22.484116 | -20.190596 | -34.711502 | -20.611750 | -41.547508 | -24.357789 | -9.737252 | -4.848874 | -0.929159 | -17.349877 | Surprise | Female |
| 2799 | 1.380444 | -14.983023 | 12.742822 | -0.159556 | -5.243630 | -32.397391 | -11.955386 | -27.468668 | -13.824717 | -9.303697 | -8.919692 | 1.806700 | -19.386223 | Sadness | Female |
2800 rows × 15 columns
# Extract MFCC features for every RAVDESS clip; the hyphen-separated filename
# fields encode emotion (index 2) and actor id (index 6).
# RAVDESS emotion code -> label; '02' (calm) and anything else become "Other",
# matching the original mapping.
_ravdess_emotion = {'05': "Anger", '07': "Disgust", '06': "Fear",
                    '03': "Happiness", '01': "Neutral", '08': "Surprise",
                    '04': "Sadness"}
ravdess_frames = []
for i in notebook.tqdm(range(len(sample_data_5))):
    data, sampling_rate = librosa.load(sample_data_5[i])
    # Get MFCC Features (winlen is in seconds; nfft sized to cover the window).
    mfcc_features = mfcc(data, sampling_rate, winlen=30, nfft=661500)
    # Convert to DataFrame: one row per analysis frame, 13 coefficients.
    mfcc_df = pd.DataFrame(mfcc_features)
    # NOTE(review): the [26:] offset assumes the exact 'Datasets/RAVDESS/...'
    # path layout — confirm before reuse.
    parts = sample_data_5[i][26:].split('-')
    mfcc_df["Emotion"] = _ravdess_emotion.get(parts[2], "Other")
    # RAVDESS convention: odd actor numbers are male, even are female.
    mfcc_df["Gender"] = "Male" if int(parts[6][:-4]) % 2 == 1 else "Female"
    ravdess_frames.append(mfcc_df)
# DataFrame.append was removed in pandas 2.0; collecting frames in a list and
# concatenating once also avoids quadratic copying inside the loop.
sample_data_mfcc_features_5 = pd.concat(ravdess_frames) if ravdess_frames else pd.DataFrame()
# Save to CSV
sample_data_mfcc_features_5.to_csv('Audio Features/sample_data_mfcc_features_5.csv', index=False, mode='w', header=True)
# Load Features
features_5 = pd.read_csv('Audio Features/sample_data_mfcc_features_5.csv')
features_5
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -3.359473 | -2.146878 | -11.564124 | 1.891171 | -9.008204 | -10.801787 | -10.833871 | -15.028368 | -8.398560 | -9.163281 | 10.721436 | 4.608740 | 1.473425 | Neutral | Male |
| 1 | -3.329815 | -2.676908 | -7.403491 | 0.345950 | -9.717465 | -12.311797 | -5.109952 | -14.550771 | -11.636229 | -8.670857 | 10.890765 | 3.436975 | 2.016244 | Neutral | Male |
| 2 | -2.444367 | -7.687208 | -3.431780 | 0.146838 | -24.630812 | -7.976574 | -3.713535 | -15.636945 | -3.625836 | -14.900788 | 8.883719 | 8.981573 | 6.187388 | Neutral | Male |
| 3 | -2.511114 | -8.663157 | -2.275137 | -2.383363 | -22.228988 | -12.477076 | 0.356150 | -11.706877 | -1.475497 | -11.278868 | 8.459507 | 14.609624 | 1.657197 | Neutral | Male |
| 4 | -3.891415 | -5.680255 | -3.246147 | -0.504579 | -6.481897 | -0.268197 | -19.338007 | -15.519278 | -4.290236 | -7.981147 | 7.386760 | 1.352189 | 0.599136 | Other | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1435 | -0.020836 | -16.578694 | -20.212497 | -35.196251 | -41.204122 | -11.407926 | -28.910504 | -26.953338 | -28.814963 | -18.164941 | -22.130170 | -20.616013 | -21.943507 | Surprise | Female |
| 1436 | 0.895214 | -14.870234 | -24.218205 | -33.666994 | -30.253341 | -31.812652 | -12.578500 | -27.321653 | -18.336286 | -10.580679 | -31.383302 | -8.684033 | -16.206074 | Surprise | Female |
| 1437 | 0.696340 | -15.306912 | -23.957822 | -27.642590 | -27.104280 | -34.365583 | -12.110144 | -30.969206 | -18.769509 | -5.478475 | -22.061271 | -1.286473 | -12.172666 | Surprise | Female |
| 1438 | 2.295254 | -27.182203 | -12.448132 | -28.649016 | -35.397557 | -14.519178 | -28.478788 | -27.172286 | -24.487222 | -20.706953 | -13.494822 | -9.909802 | -19.705105 | Surprise | Female |
| 1439 | 1.574269 | -20.212374 | -5.617051 | -38.268733 | -36.260999 | -8.539351 | -21.161402 | -15.748326 | -30.735166 | -22.535320 | -13.359412 | -3.140330 | -1.425395 | Surprise | Female |
1440 rows × 15 columns
# Merge the five per-dataset feature tables into one table with a fresh
# 0..N-1 index, and persist it for later runs.
frames = [features_1, features_2, features_3, features_4, features_5]
# Combined Dataset
combined = pd.concat(frames).reset_index(drop=True)
# Save to CSV
combined.to_csv('Audio Features/combined_features.csv', index=False, mode='w', header=True)
combined
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Emotion | Gender | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 3.203335 | 13.795782 | -19.908137 | 23.113571 | -41.785224 | 5.834210 | -52.924076 | -2.088703 | -14.617876 | 4.936264 | -15.795315 | 11.496223 | -4.460716 | Anger | Male |
| 1 | 3.446474 | 23.732635 | -30.598307 | 4.388828 | -28.666846 | 13.383600 | -52.968102 | 4.408833 | 2.814173 | -10.741523 | -14.331456 | 11.751246 | -8.396209 | Anger | Male |
| 2 | 3.476206 | 17.949951 | -30.643702 | 4.392290 | -35.739255 | -8.915932 | -46.324606 | 6.564549 | -7.061853 | -16.553248 | -16.938792 | 8.475676 | -20.742700 | Anger | Male |
| 3 | 3.558089 | 18.276150 | -20.346620 | -1.583971 | -30.147299 | 0.963494 | -54.361284 | -9.811369 | -10.147195 | -11.144327 | -1.306936 | 7.505639 | 0.572708 | Anger | Male |
| 4 | 3.764346 | 18.764859 | -24.920929 | 1.265585 | -34.814864 | -3.281257 | -50.514474 | -1.195924 | -6.026913 | -8.751210 | -12.911233 | 8.015905 | -6.634611 | Anger | Male |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 12692 | -0.020836 | -16.578694 | -20.212497 | -35.196251 | -41.204122 | -11.407926 | -28.910504 | -26.953338 | -28.814963 | -18.164941 | -22.130170 | -20.616013 | -21.943507 | Surprise | Female |
| 12693 | 0.895214 | -14.870234 | -24.218205 | -33.666994 | -30.253341 | -31.812652 | -12.578500 | -27.321653 | -18.336286 | -10.580679 | -31.383302 | -8.684033 | -16.206074 | Surprise | Female |
| 12694 | 0.696340 | -15.306912 | -23.957822 | -27.642590 | -27.104280 | -34.365583 | -12.110144 | -30.969206 | -18.769509 | -5.478475 | -22.061271 | -1.286473 | -12.172666 | Surprise | Female |
| 12695 | 2.295254 | -27.182203 | -12.448132 | -28.649016 | -35.397557 | -14.519178 | -28.478788 | -27.172286 | -24.487222 | -20.706953 | -13.494822 | -9.909802 | -19.705105 | Surprise | Female |
| 12696 | 1.574269 | -20.212374 | -5.617051 | -38.268733 | -36.260999 | -8.539351 | -21.161402 | -15.748326 | -30.735166 | -22.535320 | -13.359412 | -3.140330 | -1.425395 | Surprise | Female |
12697 rows × 15 columns
# Plotting: class/gender distributions of the combined feature table.
plt.style.use('fivethirtyeight')
combined = pd.read_csv('Audio Features/combined_features.csv')
# Drop the labels that exist in only one source dataset so the plots (and the
# downstream model) see a shared label set.
df_filtered = combined[combined['Emotion'] != "Boredom"]
combined = df_filtered[df_filtered['Emotion'] != "Other"]
figure(figsize=(14, 6), dpi=80)
plt.title('Count of Emotions', size=25)
# FIX: seaborn >= 0.12 no longer accepts a Series as the sole positional
# argument — pass it explicitly as x=.
sns.countplot(x=combined['Emotion'])
plt.ylabel('Count', size=14)
plt.xlabel('Emotions', size=14)
sns.despine(top=True, right=True, left=False, bottom=False)
plt.show()
# Plotting: gender distribution.
figure(figsize=(7, 6), dpi=80)
plt.title('Count of Gender', size=25)
sns.countplot(x=combined['Gender'])
plt.ylabel('Count', size=14)
plt.xlabel('Genders', size=14)
sns.despine(top=True, right=True, left=False, bottom=False)
plt.show()
# Random Over Sampling: balance the emotion classes by duplicating rows of
# the minority classes (deterministic via random_state).
X = combined.drop(['Emotion'], axis=1)
y = combined['Emotion']
ros = RandomOverSampler(random_state=64)
X_resampled, y_resampled = ros.fit_resample(X, y)
# Class frequencies before and after resampling.
counter_1 = collections.Counter(y)
counter_2 = collections.Counter(y_resampled)
# BUG FIX: the original opened a second plt.figure() between subplot(1,2,1)
# and subplot(1,2,2), so the two panels landed in different figures and the
# side-by-side layout never appeared. Draw both panels into one figure.
# Also dropped plt.xticks([0..6]), which overwrote the categorical bar labels
# with meaningless integers.
plt.figure()
fig = plt.gcf()
fig.set_size_inches(24, 5)
plt.subplot(1, 2, 1)
plt.bar(counter_1.keys(), counter_1.values())
plt.title("Before Sampling")
plt.subplot(1, 2, 2)
plt.bar(counter_2.keys(), counter_2.values(), color='green')
plt.title("After Sampling")
plt.subplots_adjust(top=0.92, bottom=0.08, left=0.10, right=0.95, hspace=0.5, wspace=0.35)
plt.show()
# Scaling: separate the two targets (emotion, gender) from the features and
# min-max scale every MFCC column to [0, 1].
y1 = y_resampled                      # emotion target
y2 = X_resampled["Gender"]            # gender target
X = X_resampled.drop(["Gender"], axis=1)
scaler = MinMaxScaler()
X_scaled = scaler.fit_transform(X)
for part in (y1, y2, X_scaled):
    print(part.shape)
print(X_scaled)
(14350,) (14350,) (14350, 13) [[0.93610433 0.65309955 0.62192149 ... 0.60099951 0.52473204 0.56745201] [0.94190358 0.7893224 0.53968236 ... 0.61542485 0.52713997 0.51623315] [0.94261276 0.71004844 0.53933314 ... 0.58973133 0.49621191 0.35554854] ... [0.90833302 0.15584516 0.68150021 ... 0.7242391 0.34603792 0.46732237] [0.89425745 0.28430183 0.62634095 ... 0.56568075 0.33727729 0.38896452] [0.83385165 0.40645843 0.78249741 ... 0.69382561 0.39112332 0.49386858]]
# Emotions: map emotion names to integer codes.
encoder_1 = preprocessing.LabelEncoder()
y1_encoded = encoder_1.fit_transform(y1)
# Gender: map gender names to integer codes.
encoder_2 = preprocessing.LabelEncoder()
y2_encoded = encoder_2.fit_transform(y2)
# Pair the two encoded targets per sample; the nested single-element lists
# reproduce the original layout, an array of shape (n_samples, 2, 1).
labels = np.array([[[emo], [gen]] for emo, gen in zip(y1_encoded, y2_encoded)])
# 90/10 train/test split; each label row holds [emotion, gender].
X_train, X_test, Y_train, Y_test = train_test_split(X_scaled, labels, test_size=0.1, random_state=64)
# Swap the target order so slot 0 is the gender column and slot 1 the
# emotion column (the gender model below consumes slot 0).
Y_train = [Y_train[:, 1], Y_train[:, 0]]
Y_test = [Y_test[:, 1], Y_test[:, 0]]
y1_train, y2_train = Y_train
y1_test, y2_test = Y_test
# Report the resulting shapes (same print order as before).
for part in (X_train, X_test, y1_train, y1_test, y2_train, y2_test):
    print(part.shape)
(12915, 13) (1435, 13) (12915, 1) (1435, 1) (12915, 1) (1435, 1)
# Combine the two training targets side by side: column 0 is the first
# target slot (gender code), column "1" the second (emotion code).
df = pd.DataFrame(y1_train)
df["1"] = y2_train  # NOTE(review): y2_train appears to be (n, 1); pandas flattens it into the column
y_train = np.array(df)
print(y_train)
[[0 6] [1 4] [1 0] ... [1 4] [1 3] [1 1]]
# Combine the two test targets side by side, mirroring the train stacking:
# column 0 is the first target slot (gender code), column "1" the second.
df = pd.DataFrame(y1_test)
df["1"] = y2_test  # NOTE(review): y2_test appears to be (n, 1); pandas flattens it into the column
y_test = np.array(df)
print(y_test)
[[0 1] [0 4] [1 6] ... [1 4] [0 6] [0 4]]
# Prepare inputs for the gender CNN: keep only target slot 0 and one-hot it.
x_train, x_test, y_train, y_test = X_train, X_test, y1_train, y1_test
size = 13  # MFCC coefficients per frame -> Conv1D sequence length
# Reshape features to (samples, 13, 1) as Conv1D expects a channel axis.
x_train = np.array(x_train).reshape(-1, size, 1)
x_test = np.array(x_test).reshape(-1, size, 1)
# One-hot encode the integer labels.
y_train = to_categorical(np.array(y_train))
y_test = to_categorical(np.array(y_test))
# Create Model: 1D-CNN gender classifier — four Conv1D/MaxPool stages of
# shrinking width, global average pooling, then a small dense softmax head.
model_1_g = Sequential([
    Conv1D(512, kernel_size=5, strides=1, padding='same', activation='relu',
           input_shape=(x_train.shape[1], 1)),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Conv1D(256, kernel_size=5, strides=1, padding='same', activation='relu'),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Conv1D(128, kernel_size=5, strides=1, padding='same', activation='relu'),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Dropout(0.2),
    Conv1D(64, kernel_size=5, strides=1, padding='same', activation='relu'),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    GlobalAveragePooling1D(),
    # NOTE(review): Flatten is a no-op here (GAP output is already 2-D);
    # kept to match the original architecture exactly.
    Flatten(),
    Dense(units=32, activation='relu'),
    Dropout(0.3),
    Dense(units=2, activation='softmax'),  # 2 classes: male / female
])
model_1_g.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model_1_g.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv1d (Conv1D) (None, 13, 512) 3072 _________________________________________________________________ max_pooling1d (MaxPooling1D) (None, 7, 512) 0 _________________________________________________________________ conv1d_1 (Conv1D) (None, 7, 256) 655616 _________________________________________________________________ max_pooling1d_1 (MaxPooling1 (None, 4, 256) 0 _________________________________________________________________ conv1d_2 (Conv1D) (None, 4, 128) 163968 _________________________________________________________________ max_pooling1d_2 (MaxPooling1 (None, 2, 128) 0 _________________________________________________________________ dropout (Dropout) (None, 2, 128) 0 _________________________________________________________________ conv1d_3 (Conv1D) (None, 2, 64) 41024 _________________________________________________________________ max_pooling1d_3 (MaxPooling1 (None, 1, 64) 0 _________________________________________________________________ global_average_pooling1d (Gl (None, 64) 0 _________________________________________________________________ flatten (Flatten) (None, 64) 0 _________________________________________________________________ dense (Dense) (None, 32) 2080 _________________________________________________________________ dropout_1 (Dropout) (None, 32) 0 _________________________________________________________________ dense_1 (Dense) (None, 2) 66 ================================================================= Total params: 865,826 Trainable params: 865,826 Non-trainable params: 0 _________________________________________________________________
# Training: shrink the learning rate by x0.4 whenever the training loss
# stalls for 2 epochs, down to a floor of 1e-7.
rlrp = ReduceLROnPlateau(monitor='loss', factor=0.4, verbose=0, patience=2, min_lr=0.0000001)
history_1_g = model_1_g.fit(x_train, y_train, batch_size=64, epochs=250, validation_data=(x_test, y_test), callbacks=[rlrp])
Epoch 1/250 202/202 [==============================] - 42s 207ms/step - loss: 0.5645 - accuracy: 0.6802 - val_loss: 0.5028 - val_accuracy: 0.7185 Epoch 2/250 202/202 [==============================] - 43s 213ms/step - loss: 0.4373 - accuracy: 0.7782 - val_loss: 0.4200 - val_accuracy: 0.7735 Epoch 3/250 202/202 [==============================] - 48s 236ms/step - loss: 0.3995 - accuracy: 0.8050 - val_loss: 0.3989 - val_accuracy: 0.8028 Epoch 4/250 202/202 [==============================] - 55s 272ms/step - loss: 0.3805 - accuracy: 0.8184 - val_loss: 0.3773 - val_accuracy: 0.8188 Epoch 5/250 202/202 [==============================] - 54s 269ms/step - loss: 0.3583 - accuracy: 0.8274 - val_loss: 0.3656 - val_accuracy: 0.8237 Epoch 6/250 202/202 [==============================] - 65s 323ms/step - loss: 0.3460 - accuracy: 0.8355 - val_loss: 0.3502 - val_accuracy: 0.8272 Epoch 7/250 202/202 [==============================] - 58s 286ms/step - loss: 0.3328 - accuracy: 0.8455 - val_loss: 0.3486 - val_accuracy: 0.8383 Epoch 8/250 202/202 [==============================] - 53s 265ms/step - loss: 0.3277 - accuracy: 0.8458 - val_loss: 0.3520 - val_accuracy: 0.8362 Epoch 9/250 202/202 [==============================] - 42s 208ms/step - loss: 0.3101 - accuracy: 0.8601 - val_loss: 0.3322 - val_accuracy: 0.8362 Epoch 10/250 202/202 [==============================] - 38s 188ms/step - loss: 0.3088 - accuracy: 0.8598 - val_loss: 0.3044 - val_accuracy: 0.8627 Epoch 11/250 202/202 [==============================] - 39s 192ms/step - loss: 0.3034 - accuracy: 0.8612 - val_loss: 0.3046 - val_accuracy: 0.8551 Epoch 12/250 202/202 [==============================] - 37s 185ms/step - loss: 0.2932 - accuracy: 0.8697 - val_loss: 0.3013 - val_accuracy: 0.8544 Epoch 13/250 202/202 [==============================] - 47s 232ms/step - loss: 0.2923 - accuracy: 0.8708 - val_loss: 0.2989 - val_accuracy: 0.8592 Epoch 14/250 202/202 [==============================] - 45s 223ms/step - loss: 0.2794 - accuracy: 
0.8740 - val_loss: 0.2998 - val_accuracy: 0.8578 Epoch 15/250 202/202 [==============================] - 52s 258ms/step - loss: 0.2713 - accuracy: 0.8777 - val_loss: 0.3005 - val_accuracy: 0.8746 Epoch 16/250 202/202 [==============================] - 69s 339ms/step - loss: 0.2688 - accuracy: 0.8791 - val_loss: 0.2858 - val_accuracy: 0.8808 Epoch 17/250 202/202 [==============================] - 41s 203ms/step - loss: 0.2594 - accuracy: 0.8860 - val_loss: 0.2897 - val_accuracy: 0.8704 Epoch 18/250 202/202 [==============================] - 39s 195ms/step - loss: 0.2561 - accuracy: 0.8869 - val_loss: 0.2715 - val_accuracy: 0.8801 Epoch 19/250 202/202 [==============================] - 40s 199ms/step - loss: 0.2578 - accuracy: 0.8848 - val_loss: 0.2958 - val_accuracy: 0.8599 Epoch 20/250 202/202 [==============================] - 40s 199ms/step - loss: 0.2440 - accuracy: 0.8924 - val_loss: 0.3527 - val_accuracy: 0.8000 Epoch 21/250 202/202 [==============================] - 40s 197ms/step - loss: 0.2404 - accuracy: 0.8948 - val_loss: 0.3081 - val_accuracy: 0.8662 Epoch 22/250 202/202 [==============================] - 50s 249ms/step - loss: 0.2357 - accuracy: 0.8945 - val_loss: 0.2574 - val_accuracy: 0.8885 Epoch 23/250 202/202 [==============================] - 47s 233ms/step - loss: 0.2418 - accuracy: 0.8945 - val_loss: 0.2533 - val_accuracy: 0.8878 Epoch 24/250 202/202 [==============================] - 48s 237ms/step - loss: 0.2305 - accuracy: 0.8964 - val_loss: 0.2608 - val_accuracy: 0.8892 Epoch 25/250 202/202 [==============================] - 47s 234ms/step - loss: 0.2266 - accuracy: 0.9002 - val_loss: 0.3107 - val_accuracy: 0.8530 Epoch 26/250 202/202 [==============================] - 48s 239ms/step - loss: 0.2278 - accuracy: 0.8966 - val_loss: 0.2671 - val_accuracy: 0.8836 Epoch 27/250 202/202 [==============================] - 52s 257ms/step - loss: 0.2164 - accuracy: 0.9092 - val_loss: 0.2744 - val_accuracy: 0.8718 Epoch 28/250 202/202 
[==============================] - 55s 272ms/step - loss: 0.2197 - accuracy: 0.9020 - val_loss: 0.2615 - val_accuracy: 0.8760 Epoch 29/250 202/202 [==============================] - 50s 247ms/step - loss: 0.2118 - accuracy: 0.9068 - val_loss: 0.2535 - val_accuracy: 0.8899 Epoch 30/250 202/202 [==============================] - 51s 253ms/step - loss: 0.2094 - accuracy: 0.9111 - val_loss: 0.2456 - val_accuracy: 0.8976 Epoch 31/250 202/202 [==============================] - 41s 204ms/step - loss: 0.2007 - accuracy: 0.9132 - val_loss: 0.2331 - val_accuracy: 0.8955 Epoch 32/250 202/202 [==============================] - 41s 205ms/step - loss: 0.2003 - accuracy: 0.9131 - val_loss: 0.2396 - val_accuracy: 0.9024 Epoch 33/250 202/202 [==============================] - 48s 237ms/step - loss: 0.2024 - accuracy: 0.9105 - val_loss: 0.2479 - val_accuracy: 0.8899 Epoch 34/250 202/202 [==============================] - 49s 244ms/step - loss: 0.1929 - accuracy: 0.9172 - val_loss: 0.2347 - val_accuracy: 0.9017 Epoch 35/250 202/202 [==============================] - 47s 233ms/step - loss: 0.1954 - accuracy: 0.9162 - val_loss: 0.2692 - val_accuracy: 0.8843 Epoch 36/250 202/202 [==============================] - 43s 215ms/step - loss: 0.1864 - accuracy: 0.9168 - val_loss: 0.2473 - val_accuracy: 0.8990 Epoch 37/250 202/202 [==============================] - 45s 222ms/step - loss: 0.1906 - accuracy: 0.9181 - val_loss: 0.2510 - val_accuracy: 0.8955 Epoch 38/250 202/202 [==============================] - 40s 198ms/step - loss: 0.1835 - accuracy: 0.9212 - val_loss: 0.2407 - val_accuracy: 0.9045 Epoch 39/250 202/202 [==============================] - 42s 206ms/step - loss: 0.1844 - accuracy: 0.9202 - val_loss: 0.2316 - val_accuracy: 0.9059 Epoch 40/250 202/202 [==============================] - 39s 193ms/step - loss: 0.1841 - accuracy: 0.9216 - val_loss: 0.2383 - val_accuracy: 0.9031 Epoch 41/250 202/202 [==============================] - 47s 231ms/step - loss: 0.1486 - accuracy: 0.9387 - 
val_loss: 0.2286 - val_accuracy: 0.9157 Epoch 42/250 202/202 [==============================] - 41s 201ms/step - loss: 0.1316 - accuracy: 0.9441 - val_loss: 0.2316 - val_accuracy: 0.9199 Epoch 43/250 202/202 [==============================] - 48s 236ms/step - loss: 0.1325 - accuracy: 0.9449 - val_loss: 0.2303 - val_accuracy: 0.9199 Epoch 44/250 202/202 [==============================] - 47s 231ms/step - loss: 0.1344 - accuracy: 0.9416 - val_loss: 0.2564 - val_accuracy: 0.9108 Epoch 45/250 202/202 [==============================] - 39s 194ms/step - loss: 0.1119 - accuracy: 0.9522 - val_loss: 0.2376 - val_accuracy: 0.9213 Epoch 46/250 202/202 [==============================] - 39s 195ms/step - loss: 0.1099 - accuracy: 0.9520 - val_loss: 0.2383 - val_accuracy: 0.9268 Epoch 47/250 202/202 [==============================] - 43s 211ms/step - loss: 0.1049 - accuracy: 0.9539 - val_loss: 0.2725 - val_accuracy: 0.9143 Epoch 48/250 202/202 [==============================] - 43s 214ms/step - loss: 0.1052 - accuracy: 0.9566 - val_loss: 0.2437 - val_accuracy: 0.9261 Epoch 49/250 202/202 [==============================] - 44s 218ms/step - loss: 0.1031 - accuracy: 0.9552 - val_loss: 0.2460 - val_accuracy: 0.9213 Epoch 50/250 202/202 [==============================] - 51s 251ms/step - loss: 0.0967 - accuracy: 0.9587 - val_loss: 0.2561 - val_accuracy: 0.9261 Epoch 51/250 202/202 [==============================] - 45s 221ms/step - loss: 0.1003 - accuracy: 0.9577 - val_loss: 0.2555 - val_accuracy: 0.9254 Epoch 52/250 202/202 [==============================] - 42s 207ms/step - loss: 0.0959 - accuracy: 0.9593 - val_loss: 0.2413 - val_accuracy: 0.9261 Epoch 53/250 202/202 [==============================] - 38s 188ms/step - loss: 0.0931 - accuracy: 0.9617 - val_loss: 0.2544 - val_accuracy: 0.9185 Epoch 54/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0935 - accuracy: 0.9597 - val_loss: 0.2617 - val_accuracy: 0.9157 Epoch 55/250 202/202 
[==============================] - 37s 184ms/step - loss: 0.0894 - accuracy: 0.9617 - val_loss: 0.2859 - val_accuracy: 0.9150 Epoch 56/250 202/202 [==============================] - 38s 186ms/step - loss: 0.0923 - accuracy: 0.9617 - val_loss: 0.2652 - val_accuracy: 0.9275 Epoch 57/250 202/202 [==============================] - 40s 196ms/step - loss: 0.0887 - accuracy: 0.9617 - val_loss: 0.2605 - val_accuracy: 0.9206 Epoch 58/250 202/202 [==============================] - 44s 219ms/step - loss: 0.0874 - accuracy: 0.9629 - val_loss: 0.2667 - val_accuracy: 0.9254 Epoch 59/250 202/202 [==============================] - 40s 198ms/step - loss: 0.0842 - accuracy: 0.9636 - val_loss: 0.2639 - val_accuracy: 0.9296 Epoch 60/250 202/202 [==============================] - 39s 194ms/step - loss: 0.0865 - accuracy: 0.9650 - val_loss: 0.2629 - val_accuracy: 0.9303 Epoch 61/250 202/202 [==============================] - 39s 192ms/step - loss: 0.0869 - accuracy: 0.9638 - val_loss: 0.2552 - val_accuracy: 0.9247 Epoch 62/250 202/202 [==============================] - 39s 192ms/step - loss: 0.0750 - accuracy: 0.9695 - val_loss: 0.2659 - val_accuracy: 0.9289 Epoch 63/250 202/202 [==============================] - 40s 200ms/step - loss: 0.0710 - accuracy: 0.9707 - val_loss: 0.2690 - val_accuracy: 0.9261 Epoch 64/250 202/202 [==============================] - 42s 208ms/step - loss: 0.0725 - accuracy: 0.9709 - val_loss: 0.2793 - val_accuracy: 0.9282 Epoch 65/250 202/202 [==============================] - 41s 202ms/step - loss: 0.0695 - accuracy: 0.9714 - val_loss: 0.2776 - val_accuracy: 0.9226 Epoch 66/250 202/202 [==============================] - 41s 203ms/step - loss: 0.0706 - accuracy: 0.9708 - val_loss: 0.2794 - val_accuracy: 0.9275 Epoch 67/250 202/202 [==============================] - 41s 201ms/step - loss: 0.0669 - accuracy: 0.9732 - val_loss: 0.2830 - val_accuracy: 0.9226 Epoch 68/250 202/202 [==============================] - 40s 197ms/step - loss: 0.0687 - accuracy: 0.9707 - 
val_loss: 0.2848 - val_accuracy: 0.9247 Epoch 69/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0659 - accuracy: 0.9734 - val_loss: 0.2941 - val_accuracy: 0.9261 Epoch 70/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0666 - accuracy: 0.9724 - val_loss: 0.2886 - val_accuracy: 0.9352 Epoch 71/250 202/202 [==============================] - 37s 185ms/step - loss: 0.0668 - accuracy: 0.9724 - val_loss: 0.2995 - val_accuracy: 0.9289 Epoch 72/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0614 - accuracy: 0.9752 - val_loss: 0.2957 - val_accuracy: 0.9352 Epoch 73/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0624 - accuracy: 0.9744 - val_loss: 0.2919 - val_accuracy: 0.9296 Epoch 74/250 202/202 [==============================] - 38s 189ms/step - loss: 0.0585 - accuracy: 0.9773 - val_loss: 0.2968 - val_accuracy: 0.9338 Epoch 75/250 202/202 [==============================] - 38s 189ms/step - loss: 0.0587 - accuracy: 0.9752 - val_loss: 0.3011 - val_accuracy: 0.9317 Epoch 76/250 202/202 [==============================] - 38s 186ms/step - loss: 0.0614 - accuracy: 0.9759 - val_loss: 0.2992 - val_accuracy: 0.9345 Epoch 77/250 202/202 [==============================] - 38s 190ms/step - loss: 0.0590 - accuracy: 0.9772 - val_loss: 0.2977 - val_accuracy: 0.9324 Epoch 78/250 202/202 [==============================] - 38s 189ms/step - loss: 0.0591 - accuracy: 0.9754 - val_loss: 0.3000 - val_accuracy: 0.9317 Epoch 79/250 202/202 [==============================] - 38s 190ms/step - loss: 0.0563 - accuracy: 0.9771 - val_loss: 0.3013 - val_accuracy: 0.9324 Epoch 80/250 202/202 [==============================] - 38s 186ms/step - loss: 0.0592 - accuracy: 0.9774 - val_loss: 0.3010 - val_accuracy: 0.9338 Epoch 81/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0590 - accuracy: 0.9755 - val_loss: 0.3013 - val_accuracy: 0.9345 Epoch 82/250 202/202 
[==============================] - 37s 182ms/step - loss: 0.0559 - accuracy: 0.9765 - val_loss: 0.3013 - val_accuracy: 0.9331 Epoch 83/250 202/202 [==============================] - 38s 186ms/step - loss: 0.0547 - accuracy: 0.9776 - val_loss: 0.3017 - val_accuracy: 0.9331 Epoch 84/250 202/202 [==============================] - 38s 186ms/step - loss: 0.0572 - accuracy: 0.9770 - val_loss: 0.3023 - val_accuracy: 0.9338 Epoch 85/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0568 - accuracy: 0.9774 - val_loss: 0.3024 - val_accuracy: 0.9338 Epoch 86/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0561 - accuracy: 0.9781 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 87/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0568 - accuracy: 0.9768 - val_loss: 0.3021 - val_accuracy: 0.9331 Epoch 88/250 202/202 [==============================] - 39s 191ms/step - loss: 0.0567 - accuracy: 0.9788 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 89/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0569 - accuracy: 0.9782 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 90/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0546 - accuracy: 0.9779 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 91/250 202/202 [==============================] - 46s 228ms/step - loss: 0.0556 - accuracy: 0.9784 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 92/250 202/202 [==============================] - 42s 209ms/step - loss: 0.0583 - accuracy: 0.9775 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 93/250 202/202 [==============================] - 40s 197ms/step - loss: 0.0578 - accuracy: 0.9767 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 94/250 202/202 [==============================] - 38s 189ms/step - loss: 0.0575 - accuracy: 0.9770 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 95/250 202/202 [==============================] - 48s 238ms/step - loss: 0.0578 - accuracy: 0.9775 - 
val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 96/250 202/202 [==============================] - 51s 251ms/step - loss: 0.0558 - accuracy: 0.9781 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 97/250 202/202 [==============================] - 46s 226ms/step - loss: 0.0556 - accuracy: 0.9786 - val_loss: 0.3022 - val_accuracy: 0.9331 Epoch 98/250 202/202 [==============================] - 51s 254ms/step - loss: 0.0567 - accuracy: 0.9781 - val_loss: 0.3023 - val_accuracy: 0.9331 Epoch 99/250 202/202 [==============================] - 46s 227ms/step - loss: 0.0552 - accuracy: 0.9782 - val_loss: 0.3023 - val_accuracy: 0.9331 Epoch 100/250 202/202 [==============================] - 39s 192ms/step - loss: 0.0571 - accuracy: 0.9767 - val_loss: 0.3023 - val_accuracy: 0.9331 Epoch 101/250 202/202 [==============================] - 38s 187ms/step - loss: 0.0580 - accuracy: 0.9773 - val_loss: 0.3023 - val_accuracy: 0.9331 Epoch 102/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0559 - accuracy: 0.9779 - val_loss: 0.3023 - val_accuracy: 0.9331 Epoch 103/250 202/202 [==============================] - 44s 218ms/step - loss: 0.0558 - accuracy: 0.9771 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 104/250 202/202 [==============================] - 48s 238ms/step - loss: 0.0552 - accuracy: 0.9784 - val_loss: 0.3024 - val_accuracy: 0.9324 Epoch 105/250 202/202 [==============================] - 46s 225ms/step - loss: 0.0558 - accuracy: 0.9782 - val_loss: 0.3024 - val_accuracy: 0.9324 Epoch 106/250 202/202 [==============================] - 38s 186ms/step - loss: 0.0568 - accuracy: 0.9776 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 107/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0575 - accuracy: 0.9776 - val_loss: 0.3023 - val_accuracy: 0.9331 Epoch 108/250 202/202 [==============================] - 37s 185ms/step - loss: 0.0557 - accuracy: 0.9791 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 109/250 202/202 
[==============================] - 40s 196ms/step - loss: 0.0564 - accuracy: 0.9781 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 110/250 202/202 [==============================] - 38s 187ms/step - loss: 0.0564 - accuracy: 0.9782 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 111/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0562 - accuracy: 0.9779 - val_loss: 0.3023 - val_accuracy: 0.9331 Epoch 112/250 202/202 [==============================] - 42s 206ms/step - loss: 0.0558 - accuracy: 0.9781 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 113/250 202/202 [==============================] - 42s 206ms/step - loss: 0.0555 - accuracy: 0.9780 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 114/250 202/202 [==============================] - 43s 215ms/step - loss: 0.0584 - accuracy: 0.9775 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 115/250 202/202 [==============================] - 44s 218ms/step - loss: 0.0561 - accuracy: 0.9789 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 116/250 202/202 [==============================] - 49s 243ms/step - loss: 0.0564 - accuracy: 0.9764 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 117/250 202/202 [==============================] - 44s 220ms/step - loss: 0.0570 - accuracy: 0.9776 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 118/250 202/202 [==============================] - 50s 246ms/step - loss: 0.0575 - accuracy: 0.9769 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 119/250 202/202 [==============================] - 42s 210ms/step - loss: 0.0572 - accuracy: 0.9773 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 120/250 202/202 [==============================] - 45s 222ms/step - loss: 0.0572 - accuracy: 0.9779 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 121/250 202/202 [==============================] - 42s 206ms/step - loss: 0.0565 - accuracy: 0.9782 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 122/250 202/202 [==============================] - 51s 255ms/step - loss: 0.0563 - 
accuracy: 0.9775 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 123/250 202/202 [==============================] - 51s 254ms/step - loss: 0.0550 - accuracy: 0.9792 - val_loss: 0.3024 - val_accuracy: 0.9331 Epoch 124/250 202/202 [==============================] - 64s 314ms/step - loss: 0.0569 - accuracy: 0.9774 - val_loss: 0.3024 - val_accuracy: 0.9324 Epoch 125/250 202/202 [==============================] - 52s 256ms/step - loss: 0.0558 - accuracy: 0.9779 - val_loss: 0.3024 - val_accuracy: 0.9324 Epoch 126/250 202/202 [==============================] - 44s 220ms/step - loss: 0.0564 - accuracy: 0.9770 - val_loss: 0.3025 - val_accuracy: 0.9324 Epoch 127/250 202/202 [==============================] - 43s 215ms/step - loss: 0.0568 - accuracy: 0.9776 - val_loss: 0.3025 - val_accuracy: 0.9331 Epoch 128/250 202/202 [==============================] - 39s 194ms/step - loss: 0.0581 - accuracy: 0.9767 - val_loss: 0.3025 - val_accuracy: 0.9331 Epoch 129/250 202/202 [==============================] - 39s 192ms/step - loss: 0.0575 - accuracy: 0.9771 - val_loss: 0.3025 - val_accuracy: 0.9331 Epoch 130/250 202/202 [==============================] - 40s 197ms/step - loss: 0.0560 - accuracy: 0.9778 - val_loss: 0.3025 - val_accuracy: 0.9331 Epoch 131/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0561 - accuracy: 0.9776 - val_loss: 0.3025 - val_accuracy: 0.9331 Epoch 132/250 202/202 [==============================] - 38s 189ms/step - loss: 0.0548 - accuracy: 0.9786 - val_loss: 0.3025 - val_accuracy: 0.9331 Epoch 133/250 202/202 [==============================] - 38s 188ms/step - loss: 0.0571 - accuracy: 0.9776 - val_loss: 0.3026 - val_accuracy: 0.9331 Epoch 134/250 202/202 [==============================] - 38s 187ms/step - loss: 0.0566 - accuracy: 0.9783 - val_loss: 0.3026 - val_accuracy: 0.9324 Epoch 135/250 202/202 [==============================] - 42s 207ms/step - loss: 0.0553 - accuracy: 0.9774 - val_loss: 0.3026 - val_accuracy: 0.9324 Epoch 136/250 
202/202 [==============================] - 37s 185ms/step - loss: 0.0561 - accuracy: 0.9783 - val_loss: 0.3026 - val_accuracy: 0.9324 Epoch 137/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0557 - accuracy: 0.9788 - val_loss: 0.3026 - val_accuracy: 0.9331 Epoch 138/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0567 - accuracy: 0.9781 - val_loss: 0.3026 - val_accuracy: 0.9331 Epoch 139/250 202/202 [==============================] - 39s 192ms/step - loss: 0.0559 - accuracy: 0.9776 - val_loss: 0.3026 - val_accuracy: 0.9331 Epoch 140/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0556 - accuracy: 0.9775 - val_loss: 0.3026 - val_accuracy: 0.9331 Epoch 141/250 202/202 [==============================] - 36s 180ms/step - loss: 0.0549 - accuracy: 0.9779 - val_loss: 0.3026 - val_accuracy: 0.9331 Epoch 142/250 202/202 [==============================] - 42s 208ms/step - loss: 0.0551 - accuracy: 0.9779 - val_loss: 0.3027 - val_accuracy: 0.9331 Epoch 143/250 202/202 [==============================] - 38s 187ms/step - loss: 0.0571 - accuracy: 0.9768 - val_loss: 0.3026 - val_accuracy: 0.9331 Epoch 144/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0559 - accuracy: 0.9776 - val_loss: 0.3027 - val_accuracy: 0.9331 Epoch 145/250 202/202 [==============================] - 36s 180ms/step - loss: 0.0549 - accuracy: 0.9787 - val_loss: 0.3027 - val_accuracy: 0.9331 Epoch 146/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0557 - accuracy: 0.9779 - val_loss: 0.3027 - val_accuracy: 0.9324 Epoch 147/250 202/202 [==============================] - 36s 180ms/step - loss: 0.0567 - accuracy: 0.9779 - val_loss: 0.3027 - val_accuracy: 0.9324 Epoch 148/250 202/202 [==============================] - 36s 180ms/step - loss: 0.0579 - accuracy: 0.9772 - val_loss: 0.3028 - val_accuracy: 0.9324 Epoch 149/250 202/202 [==============================] - 37s 185ms/step - loss: 0.0558 - 
accuracy: 0.9772 - val_loss: 0.3028 - val_accuracy: 0.9324 Epoch 150/250 202/202 [==============================] - 44s 217ms/step - loss: 0.0562 - accuracy: 0.9778 - val_loss: 0.3028 - val_accuracy: 0.9324 Epoch 151/250 202/202 [==============================] - 41s 202ms/step - loss: 0.0561 - accuracy: 0.9777 - val_loss: 0.3028 - val_accuracy: 0.9324 Epoch 152/250 202/202 [==============================] - 40s 200ms/step - loss: 0.0564 - accuracy: 0.9765 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 153/250 202/202 [==============================] - 40s 198ms/step - loss: 0.0568 - accuracy: 0.9769 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 154/250 202/202 [==============================] - 41s 203ms/step - loss: 0.0564 - accuracy: 0.9781 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 155/250 202/202 [==============================] - 40s 197ms/step - loss: 0.0565 - accuracy: 0.9779 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 156/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0568 - accuracy: 0.9768 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 157/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0560 - accuracy: 0.9776 - val_loss: 0.3028 - val_accuracy: 0.9324 Epoch 158/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0555 - accuracy: 0.9773 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 159/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0571 - accuracy: 0.9772 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 160/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0562 - accuracy: 0.9772 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 161/250 202/202 [==============================] - 38s 186ms/step - loss: 0.0556 - accuracy: 0.9772 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 162/250 202/202 [==============================] - 37s 185ms/step - loss: 0.0556 - accuracy: 0.9783 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 163/250 
202/202 [==============================] - 37s 184ms/step - loss: 0.0554 - accuracy: 0.9779 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 164/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0573 - accuracy: 0.9761 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 165/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0560 - accuracy: 0.9768 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 166/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0559 - accuracy: 0.9779 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 167/250 202/202 [==============================] - 35s 172ms/step - loss: 0.0559 - accuracy: 0.9769 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 168/250 202/202 [==============================] - 34s 170ms/step - loss: 0.0551 - accuracy: 0.9779 - val_loss: 0.3028 - val_accuracy: 0.9324 Epoch 169/250 202/202 [==============================] - 34s 168ms/step - loss: 0.0559 - accuracy: 0.9772 - val_loss: 0.3028 - val_accuracy: 0.9324 Epoch 170/250 202/202 [==============================] - 34s 168ms/step - loss: 0.0566 - accuracy: 0.9783 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 171/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0568 - accuracy: 0.9765 - val_loss: 0.3028 - val_accuracy: 0.9331 Epoch 172/250 202/202 [==============================] - 34s 170ms/step - loss: 0.0554 - accuracy: 0.9775 - val_loss: 0.3029 - val_accuracy: 0.9324 Epoch 173/250 202/202 [==============================] - 34s 168ms/step - loss: 0.0569 - accuracy: 0.9772 - val_loss: 0.3029 - val_accuracy: 0.9324 Epoch 174/250 202/202 [==============================] - 34s 168ms/step - loss: 0.0566 - accuracy: 0.9779 - val_loss: 0.3029 - val_accuracy: 0.9324 Epoch 175/250 202/202 [==============================] - 35s 171ms/step - loss: 0.0557 - accuracy: 0.9769 - val_loss: 0.3029 - val_accuracy: 0.9324 Epoch 176/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0561 - 
accuracy: 0.9783 - val_loss: 0.3029 - val_accuracy: 0.9324 Epoch 177/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0551 - accuracy: 0.9782 - val_loss: 0.3029 - val_accuracy: 0.9324 Epoch 178/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0577 - accuracy: 0.9769 - val_loss: 0.3029 - val_accuracy: 0.9324 Epoch 179/250 202/202 [==============================] - 34s 169ms/step - loss: 0.0560 - accuracy: 0.9775 - val_loss: 0.3029 - val_accuracy: 0.9324 Epoch 180/250 202/202 [==============================] - 34s 169ms/step - loss: 0.0562 - accuracy: 0.9775 - val_loss: 0.3029 - val_accuracy: 0.9324 Epoch 181/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0555 - accuracy: 0.9774 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 182/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0551 - accuracy: 0.9782 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 183/250 202/202 [==============================] - 34s 169ms/step - loss: 0.0564 - accuracy: 0.9783 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 184/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0553 - accuracy: 0.9773 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 185/250 202/202 [==============================] - 34s 168ms/step - loss: 0.0584 - accuracy: 0.9767 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 186/250 202/202 [==============================] - 34s 169ms/step - loss: 0.0570 - accuracy: 0.9775 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 187/250 202/202 [==============================] - 34s 169ms/step - loss: 0.0567 - accuracy: 0.9772 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 188/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0564 - accuracy: 0.9775 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 189/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0567 - accuracy: 0.9776 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 190/250 
202/202 [==============================] - 34s 169ms/step - loss: 0.0577 - accuracy: 0.9770 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 191/250 202/202 [==============================] - 34s 167ms/step - loss: 0.0588 - accuracy: 0.9761 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 192/250 202/202 [==============================] - 36s 179ms/step - loss: 0.0559 - accuracy: 0.9779 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 193/250 202/202 [==============================] - 48s 236ms/step - loss: 0.0566 - accuracy: 0.9776 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 194/250 202/202 [==============================] - 49s 243ms/step - loss: 0.0552 - accuracy: 0.9775 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 195/250 202/202 [==============================] - 48s 238ms/step - loss: 0.0560 - accuracy: 0.9777 - val_loss: 0.3030 - val_accuracy: 0.9324 Epoch 196/250 202/202 [==============================] - 46s 230ms/step - loss: 0.0546 - accuracy: 0.9783 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 197/250 202/202 [==============================] - 46s 229ms/step - loss: 0.0581 - accuracy: 0.9767 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 198/250 202/202 [==============================] - 47s 233ms/step - loss: 0.0552 - accuracy: 0.9784 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 199/250 202/202 [==============================] - 47s 230ms/step - loss: 0.0564 - accuracy: 0.9775 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 200/250 202/202 [==============================] - 48s 235ms/step - loss: 0.0554 - accuracy: 0.9774 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 201/250 202/202 [==============================] - 48s 237ms/step - loss: 0.0566 - accuracy: 0.9775 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 202/250 202/202 [==============================] - 48s 237ms/step - loss: 0.0553 - accuracy: 0.9772 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 203/250 202/202 [==============================] - 48s 237ms/step - loss: 0.0556 - 
accuracy: 0.9775 - val_loss: 0.3030 - val_accuracy: 0.9331 Epoch 204/250 202/202 [==============================] - 47s 235ms/step - loss: 0.0558 - accuracy: 0.9784 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 205/250 202/202 [==============================] - 48s 237ms/step - loss: 0.0557 - accuracy: 0.9783 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 206/250 202/202 [==============================] - 49s 244ms/step - loss: 0.0546 - accuracy: 0.9770 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 207/250 202/202 [==============================] - 46s 226ms/step - loss: 0.0564 - accuracy: 0.9772 - val_loss: 0.3029 - val_accuracy: 0.9331 Epoch 208/250 202/202 [==============================] - 46s 228ms/step - loss: 0.0550 - accuracy: 0.9782 - val_loss: 0.3030 - val_accuracy: 0.9331 Epoch 209/250 202/202 [==============================] - 46s 228ms/step - loss: 0.0555 - accuracy: 0.9775 - val_loss: 0.3030 - val_accuracy: 0.9331 Epoch 210/250 202/202 [==============================] - 46s 227ms/step - loss: 0.0570 - accuracy: 0.9772 - val_loss: 0.3030 - val_accuracy: 0.9324 Epoch 211/250 202/202 [==============================] - 47s 231ms/step - loss: 0.0545 - accuracy: 0.9784 - val_loss: 0.3030 - val_accuracy: 0.9324 Epoch 212/250 202/202 [==============================] - 46s 230ms/step - loss: 0.0552 - accuracy: 0.9775 - val_loss: 0.3030 - val_accuracy: 0.9331 Epoch 213/250 202/202 [==============================] - 46s 230ms/step - loss: 0.0575 - accuracy: 0.9775 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 214/250 202/202 [==============================] - 47s 232ms/step - loss: 0.0569 - accuracy: 0.9779 - val_loss: 0.3031 - val_accuracy: 0.9324 Epoch 215/250 202/202 [==============================] - 47s 230ms/step - loss: 0.0558 - accuracy: 0.9778 - val_loss: 0.3031 - val_accuracy: 0.9324 Epoch 216/250 202/202 [==============================] - 47s 232ms/step - loss: 0.0572 - accuracy: 0.9773 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 217/250 
202/202 [==============================] - 47s 230ms/step - loss: 0.0557 - accuracy: 0.9781 - val_loss: 0.3030 - val_accuracy: 0.9331 Epoch 218/250 202/202 [==============================] - 46s 229ms/step - loss: 0.0566 - accuracy: 0.9774 - val_loss: 0.3030 - val_accuracy: 0.9331 Epoch 219/250 202/202 [==============================] - 47s 234ms/step - loss: 0.0567 - accuracy: 0.9767 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 220/250 202/202 [==============================] - 48s 239ms/step - loss: 0.0568 - accuracy: 0.9771 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 221/250 202/202 [==============================] - 51s 251ms/step - loss: 0.0545 - accuracy: 0.9781 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 222/250 202/202 [==============================] - 56s 278ms/step - loss: 0.0562 - accuracy: 0.9784 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 223/250 202/202 [==============================] - 56s 278ms/step - loss: 0.0562 - accuracy: 0.9783 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 224/250 202/202 [==============================] - 56s 278ms/step - loss: 0.0545 - accuracy: 0.9799 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 225/250 202/202 [==============================] - 52s 258ms/step - loss: 0.0555 - accuracy: 0.9775 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 226/250 202/202 [==============================] - 48s 239ms/step - loss: 0.0565 - accuracy: 0.9779 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 227/250 202/202 [==============================] - 49s 243ms/step - loss: 0.0543 - accuracy: 0.9779 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 228/250 202/202 [==============================] - 54s 268ms/step - loss: 0.0557 - accuracy: 0.9784 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 229/250 202/202 [==============================] - 46s 228ms/step - loss: 0.0563 - accuracy: 0.9776 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 230/250 202/202 [==============================] - 51s 252ms/step - loss: 0.0565 - 
accuracy: 0.9760 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 231/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0570 - accuracy: 0.9771 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 232/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0567 - accuracy: 0.9768 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 233/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0555 - accuracy: 0.9780 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 234/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0573 - accuracy: 0.9764 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 235/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0563 - accuracy: 0.9775 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 236/250 202/202 [==============================] - 37s 184ms/step - loss: 0.0557 - accuracy: 0.9772 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 237/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0555 - accuracy: 0.9773 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 238/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0577 - accuracy: 0.9765 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 239/250 202/202 [==============================] - 37s 185ms/step - loss: 0.0572 - accuracy: 0.9768 - val_loss: 0.3031 - val_accuracy: 0.9331 Epoch 240/250 202/202 [==============================] - 37s 181ms/step - loss: 0.0553 - accuracy: 0.9781 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 241/250 202/202 [==============================] - 37s 181ms/step - loss: 0.0569 - accuracy: 0.9768 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 242/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0551 - accuracy: 0.9778 - val_loss: 0.3032 - val_accuracy: 0.9331 Epoch 243/250 202/202 [==============================] - 37s 181ms/step - loss: 0.0560 - accuracy: 0.9779 - val_loss: 0.3033 - val_accuracy: 0.9324 Epoch 244/250 
202/202 [==============================] - 37s 181ms/step - loss: 0.0562 - accuracy: 0.9772 - val_loss: 0.3033 - val_accuracy: 0.9324 Epoch 245/250 202/202 [==============================] - 37s 181ms/step - loss: 0.0553 - accuracy: 0.9786 - val_loss: 0.3033 - val_accuracy: 0.9324 Epoch 246/250 202/202 [==============================] - 38s 186ms/step - loss: 0.0574 - accuracy: 0.9783 - val_loss: 0.3033 - val_accuracy: 0.9331 Epoch 247/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0565 - accuracy: 0.9779 - val_loss: 0.3033 - val_accuracy: 0.9331 Epoch 248/250 202/202 [==============================] - 37s 183ms/step - loss: 0.0571 - accuracy: 0.9765 - val_loss: 0.3033 - val_accuracy: 0.9331 Epoch 249/250 202/202 [==============================] - 38s 187ms/step - loss: 0.0575 - accuracy: 0.9769 - val_loss: 0.3034 - val_accuracy: 0.9324 Epoch 250/250 202/202 [==============================] - 37s 182ms/step - loss: 0.0572 - accuracy: 0.9778 - val_loss: 0.3034 - val_accuracy: 0.9324
# Plot the training history of model_1_g (loss and accuracy curves).
plt.style.use('fivethirtyeight')

# Pull the per-epoch metrics recorded by model.fit. Deriving the x-axis
# from the history length (instead of hard-coding 250) keeps the plot
# correct if the number of epochs ever changes or training stops early.
train_acc = history_1_g.history['accuracy']
train_loss = history_1_g.history['loss']
test_acc = history_1_g.history['val_accuracy']
test_loss = history_1_g.history['val_loss']
epochs = range(len(train_loss))

fig, ax = plt.subplots(1, 2)
fig.set_size_inches(20, 6)

# Left panel: training vs. validation ("Testing") loss.
ax[0].plot(epochs, train_loss, label='Training Loss')
ax[0].plot(epochs, test_loss, label='Testing Loss')
ax[0].set_title('Training & Testing Loss')
ax[0].legend()
ax[0].set_xlabel("Epochs")

# Right panel: training vs. validation ("Testing") accuracy.
ax[1].plot(epochs, train_acc, label='Training Accuracy')
ax[1].plot(epochs, test_acc, label='Testing Accuracy')
ax[1].set_title('Training & Testing Accuracy')
ax[1].legend()
ax[1].set_xlabel("Epochs")
plt.show()
# Save Model
# Persist the trained model in HDF5 format so it can be reloaded later
# (e.g. via keras.models.load_model) without retraining.
model_1_g.save('Trained Models/model_1_g.h5')
# Prepare the train/test splits for the next 1-D CNN: add a trailing
# channel axis to the feature vectors and one-hot encode the labels.
x_train, x_test, y_train, y_test = X_train, X_test, y2_train, y2_test
size = 13  # features per sample; presumably the 13 MFCC coefficients — TODO confirm

# Convert to arrays and reshape to (n_samples, size, 1) as Conv1D expects;
# -1 lets NumPy infer the sample count.
x_train = np.array(x_train).reshape(-1, size, 1)
x_test = np.array(x_test).reshape(-1, size, 1)

# One-hot encode the integer class labels for categorical_crossentropy.
y_train = to_categorical(np.array(y_train))
y_test = to_categorical(np.array(y_test))
# Create Model
# 1-D CNN classifier "model_1_e": four Conv1D + MaxPooling1D stages of
# shrinking width (512 -> 256 -> 128 -> 64), global average pooling, and
# a small dense head with a 7-way softmax output.
model_1_e = Sequential([
    Conv1D(512, kernel_size=5, strides=1, padding='same', activation='relu',
           input_shape=(x_train.shape[1], 1)),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Conv1D(256, kernel_size=5, strides=1, padding='same', activation='relu'),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Conv1D(128, kernel_size=5, strides=1, padding='same', activation='relu'),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    Dropout(0.2),
    Conv1D(64, kernel_size=5, strides=1, padding='same', activation='relu'),
    MaxPooling1D(pool_size=5, strides=2, padding='same'),
    GlobalAveragePooling1D(),
    # NOTE(review): Flatten is a no-op after global pooling (output is
    # already 1-D); kept so the architecture and summary stay identical.
    Flatten(),
    Dense(units=32, activation='relu'),
    Dropout(0.3),
    Dense(units=7, activation='softmax'),
])
model_1_e.compile(optimizer='adam', loss='categorical_crossentropy',
                  metrics=['accuracy'])
model_1_e.summary()
Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv1d_4 (Conv1D) (None, 13, 512) 3072 _________________________________________________________________ max_pooling1d_4 (MaxPooling1 (None, 7, 512) 0 _________________________________________________________________ conv1d_5 (Conv1D) (None, 7, 256) 655616 _________________________________________________________________ max_pooling1d_5 (MaxPooling1 (None, 4, 256) 0 _________________________________________________________________ conv1d_6 (Conv1D) (None, 4, 128) 163968 _________________________________________________________________ max_pooling1d_6 (MaxPooling1 (None, 2, 128) 0 _________________________________________________________________ dropout_2 (Dropout) (None, 2, 128) 0 _________________________________________________________________ conv1d_7 (Conv1D) (None, 2, 64) 41024 _________________________________________________________________ max_pooling1d_7 (MaxPooling1 (None, 1, 64) 0 _________________________________________________________________ global_average_pooling1d_1 ( (None, 64) 0 _________________________________________________________________ flatten_1 (Flatten) (None, 64) 0 _________________________________________________________________ dense_2 (Dense) (None, 32) 2080 _________________________________________________________________ dropout_3 (Dropout) (None, 32) 0 _________________________________________________________________ dense_3 (Dense) (None, 7) 231 ================================================================= Total params: 865,991 Trainable params: 865,991 Non-trainable params: 0 _________________________________________________________________
# Training
# Shrink the learning rate to 40% of its value whenever the training
# loss fails to improve for 2 consecutive epochs, with a floor of 1e-7.
rlrp = ReduceLROnPlateau(
    monitor='loss',
    factor=0.4,
    verbose=0,
    patience=2,
    min_lr=1e-7,
)
# Fit for 250 epochs, tracking the held-out split as validation data.
history_1_e = model_1_e.fit(
    x_train,
    y_train,
    batch_size=64,
    epochs=250,
    validation_data=(x_test, y_test),
    callbacks=[rlrp],
)
Epoch 1/250 202/202 [==============================] - 37s 185ms/step - loss: 1.8635 - accuracy: 0.2314 - val_loss: 1.6696 - val_accuracy: 0.3477 Epoch 2/250 202/202 [==============================] - 38s 187ms/step - loss: 1.6155 - accuracy: 0.3628 - val_loss: 1.4881 - val_accuracy: 0.4202 Epoch 3/250 202/202 [==============================] - 37s 185ms/step - loss: 1.5331 - accuracy: 0.3982 - val_loss: 1.4356 - val_accuracy: 0.4432 Epoch 4/250 202/202 [==============================] - 38s 188ms/step - loss: 1.4914 - accuracy: 0.4230 - val_loss: 1.4228 - val_accuracy: 0.4481 Epoch 5/250 202/202 [==============================] - 37s 182ms/step - loss: 1.4481 - accuracy: 0.4389 - val_loss: 1.3948 - val_accuracy: 0.4585 Epoch 6/250 202/202 [==============================] - 34s 168ms/step - loss: 1.3961 - accuracy: 0.4591 - val_loss: 1.3350 - val_accuracy: 0.4829 Epoch 7/250 202/202 [==============================] - 34s 168ms/step - loss: 1.3646 - accuracy: 0.4735 - val_loss: 1.3087 - val_accuracy: 0.4843 Epoch 8/250 202/202 [==============================] - 34s 168ms/step - loss: 1.3466 - accuracy: 0.4791 - val_loss: 1.3082 - val_accuracy: 0.4892 Epoch 9/250 202/202 [==============================] - 34s 171ms/step - loss: 1.3302 - accuracy: 0.4873 - val_loss: 1.2862 - val_accuracy: 0.4892 Epoch 10/250 202/202 [==============================] - 34s 168ms/step - loss: 1.2999 - accuracy: 0.4965 - val_loss: 1.2905 - val_accuracy: 0.4941 Epoch 11/250 202/202 [==============================] - 34s 169ms/step - loss: 1.2871 - accuracy: 0.5010 - val_loss: 1.2598 - val_accuracy: 0.5087 Epoch 12/250 202/202 [==============================] - 34s 170ms/step - loss: 1.2646 - accuracy: 0.5110 - val_loss: 1.2192 - val_accuracy: 0.5143 Epoch 13/250 202/202 [==============================] - 34s 168ms/step - loss: 1.2385 - accuracy: 0.5175 - val_loss: 1.2148 - val_accuracy: 0.5164 Epoch 14/250 202/202 [==============================] - 35s 171ms/step - loss: 1.2303 - accuracy: 
0.5240 - val_loss: 1.2316 - val_accuracy: 0.5178 Epoch 15/250 202/202 [==============================] - 34s 168ms/step - loss: 1.2150 - accuracy: 0.5254 - val_loss: 1.1838 - val_accuracy: 0.5261 Epoch 16/250 202/202 [==============================] - 34s 170ms/step - loss: 1.2007 - accuracy: 0.5337 - val_loss: 1.1699 - val_accuracy: 0.5261 Epoch 17/250 202/202 [==============================] - 34s 169ms/step - loss: 1.1976 - accuracy: 0.5340 - val_loss: 1.1837 - val_accuracy: 0.5394 Epoch 18/250 202/202 [==============================] - 34s 168ms/step - loss: 1.1913 - accuracy: 0.5367 - val_loss: 1.1741 - val_accuracy: 0.5366 Epoch 19/250 202/202 [==============================] - 34s 168ms/step - loss: 1.1591 - accuracy: 0.5510 - val_loss: 1.1787 - val_accuracy: 0.5380 Epoch 20/250 202/202 [==============================] - 34s 170ms/step - loss: 1.1728 - accuracy: 0.5471 - val_loss: 1.1441 - val_accuracy: 0.5512 Epoch 21/250 202/202 [==============================] - 34s 168ms/step - loss: 1.1389 - accuracy: 0.5597 - val_loss: 1.1294 - val_accuracy: 0.5645 Epoch 22/250 202/202 [==============================] - 35s 171ms/step - loss: 1.1412 - accuracy: 0.5555 - val_loss: 1.1447 - val_accuracy: 0.5603 Epoch 23/250 202/202 [==============================] - 34s 170ms/step - loss: 1.1297 - accuracy: 0.5695 - val_loss: 1.1319 - val_accuracy: 0.5679 Epoch 24/250 202/202 [==============================] - 34s 170ms/step - loss: 1.1219 - accuracy: 0.5686 - val_loss: 1.1366 - val_accuracy: 0.5540 Epoch 25/250 202/202 [==============================] - 34s 168ms/step - loss: 1.1180 - accuracy: 0.5659 - val_loss: 1.1410 - val_accuracy: 0.5617 Epoch 26/250 202/202 [==============================] - 34s 170ms/step - loss: 1.1202 - accuracy: 0.5683 - val_loss: 1.1301 - val_accuracy: 0.5659 Epoch 27/250 202/202 [==============================] - 34s 171ms/step - loss: 1.0995 - accuracy: 0.5766 - val_loss: 1.1253 - val_accuracy: 0.5854 Epoch 28/250 202/202 
[==============================] - 34s 168ms/step - loss: 1.0936 - accuracy: 0.5789 - val_loss: 1.1262 - val_accuracy: 0.5589 Epoch 29/250 202/202 [==============================] - 34s 169ms/step - loss: 1.0849 - accuracy: 0.5813 - val_loss: 1.1387 - val_accuracy: 0.5693 Epoch 30/250 202/202 [==============================] - 34s 170ms/step - loss: 1.0865 - accuracy: 0.5803 - val_loss: 1.1466 - val_accuracy: 0.5659 Epoch 31/250 202/202 [==============================] - 34s 169ms/step - loss: 1.0652 - accuracy: 0.5895 - val_loss: 1.0987 - val_accuracy: 0.5805 Epoch 32/250 202/202 [==============================] - 34s 168ms/step - loss: 1.0558 - accuracy: 0.5927 - val_loss: 1.1030 - val_accuracy: 0.5854 Epoch 33/250 202/202 [==============================] - 34s 168ms/step - loss: 1.0618 - accuracy: 0.5913 - val_loss: 1.1120 - val_accuracy: 0.5735 Epoch 34/250 202/202 [==============================] - 34s 170ms/step - loss: 1.0588 - accuracy: 0.5951 - val_loss: 1.0900 - val_accuracy: 0.5937 Epoch 35/250 202/202 [==============================] - 34s 169ms/step - loss: 0.9991 - accuracy: 0.6150 - val_loss: 1.0877 - val_accuracy: 0.5944 Epoch 36/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9832 - accuracy: 0.6194 - val_loss: 1.0823 - val_accuracy: 0.5937 Epoch 37/250 202/202 [==============================] - 34s 170ms/step - loss: 0.9742 - accuracy: 0.6276 - val_loss: 1.0661 - val_accuracy: 0.5875 Epoch 38/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9671 - accuracy: 0.6252 - val_loss: 1.0708 - val_accuracy: 0.5916 Epoch 39/250 202/202 [==============================] - 34s 169ms/step - loss: 0.9674 - accuracy: 0.6271 - val_loss: 1.0779 - val_accuracy: 0.5902 Epoch 40/250 202/202 [==============================] - 34s 170ms/step - loss: 0.9625 - accuracy: 0.6326 - val_loss: 1.0592 - val_accuracy: 0.6091 Epoch 41/250 202/202 [==============================] - 34s 170ms/step - loss: 0.9605 - accuracy: 0.6327 - 
val_loss: 1.0584 - val_accuracy: 0.5965 Epoch 42/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9527 - accuracy: 0.6316 - val_loss: 1.0627 - val_accuracy: 0.5958 Epoch 43/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9537 - accuracy: 0.6325 - val_loss: 1.0758 - val_accuracy: 0.5937 Epoch 44/250 202/202 [==============================] - 34s 169ms/step - loss: 0.9449 - accuracy: 0.6372 - val_loss: 1.0663 - val_accuracy: 0.6014 Epoch 45/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9399 - accuracy: 0.6422 - val_loss: 1.0728 - val_accuracy: 0.5979 Epoch 46/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9341 - accuracy: 0.6438 - val_loss: 1.0723 - val_accuracy: 0.6014 Epoch 47/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9292 - accuracy: 0.6406 - val_loss: 1.0849 - val_accuracy: 0.5986 Epoch 48/250 202/202 [==============================] - 36s 180ms/step - loss: 0.9227 - accuracy: 0.6453 - val_loss: 1.0800 - val_accuracy: 0.5944 Epoch 49/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9241 - accuracy: 0.6493 - val_loss: 1.0981 - val_accuracy: 0.5923 Epoch 50/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9136 - accuracy: 0.6503 - val_loss: 1.0756 - val_accuracy: 0.5958 Epoch 51/250 202/202 [==============================] - 34s 170ms/step - loss: 0.9048 - accuracy: 0.6540 - val_loss: 1.0591 - val_accuracy: 0.6000 Epoch 52/250 202/202 [==============================] - 34s 168ms/step - loss: 0.9056 - accuracy: 0.6496 - val_loss: 1.0770 - val_accuracy: 0.5909 Epoch 53/250 202/202 [==============================] - 34s 168ms/step - loss: 0.8901 - accuracy: 0.6579 - val_loss: 1.1022 - val_accuracy: 0.5930 Epoch 54/250 202/202 [==============================] - 34s 167ms/step - loss: 0.8934 - accuracy: 0.6547 - val_loss: 1.0798 - val_accuracy: 0.6000 Epoch 55/250 202/202 
[==============================] - 34s 170ms/step - loss: 0.8872 - accuracy: 0.6626 - val_loss: 1.0876 - val_accuracy: 0.5993 Epoch 56/250 202/202 [==============================] - 34s 167ms/step - loss: 0.8870 - accuracy: 0.6604 - val_loss: 1.0697 - val_accuracy: 0.5979 Epoch 57/250 202/202 [==============================] - 34s 168ms/step - loss: 0.8852 - accuracy: 0.6627 - val_loss: 1.0648 - val_accuracy: 0.5993 Epoch 58/250 202/202 [==============================] - 34s 169ms/step - loss: 0.8754 - accuracy: 0.6636 - val_loss: 1.0827 - val_accuracy: 0.6035 Epoch 59/250 202/202 [==============================] - 34s 168ms/step - loss: 0.8654 - accuracy: 0.6674 - val_loss: 1.0856 - val_accuracy: 0.6000 Epoch 60/250 202/202 [==============================] - 34s 167ms/step - loss: 0.8720 - accuracy: 0.6678 - val_loss: 1.0895 - val_accuracy: 0.5986 Epoch 61/250 202/202 [==============================] - 34s 167ms/step - loss: 0.8617 - accuracy: 0.6732 - val_loss: 1.1048 - val_accuracy: 0.6049 Epoch 62/250 202/202 [==============================] - 34s 170ms/step - loss: 0.8552 - accuracy: 0.6739 - val_loss: 1.0932 - val_accuracy: 0.6014 Epoch 63/250 202/202 [==============================] - 34s 168ms/step - loss: 0.8531 - accuracy: 0.6706 - val_loss: 1.0755 - val_accuracy: 0.6035 Epoch 64/250 202/202 [==============================] - 34s 167ms/step - loss: 0.8532 - accuracy: 0.6757 - val_loss: 1.0691 - val_accuracy: 0.5986 Epoch 65/250 202/202 [==============================] - 34s 169ms/step - loss: 0.8537 - accuracy: 0.6739 - val_loss: 1.0902 - val_accuracy: 0.6077 Epoch 66/250 202/202 [==============================] - 34s 169ms/step - loss: 0.8150 - accuracy: 0.6907 - val_loss: 1.0887 - val_accuracy: 0.6035 Epoch 67/250 202/202 [==============================] - 34s 170ms/step - loss: 0.8080 - accuracy: 0.6975 - val_loss: 1.0667 - val_accuracy: 0.6084 Epoch 68/250 202/202 [==============================] - 34s 167ms/step - loss: 0.8013 - accuracy: 0.6955 - 
val_loss: 1.0846 - val_accuracy: 0.6028 Epoch 69/250 202/202 [==============================] - 34s 169ms/step - loss: 0.7901 - accuracy: 0.6979 - val_loss: 1.1064 - val_accuracy: 0.5986 Epoch 70/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7909 - accuracy: 0.6960 - val_loss: 1.1054 - val_accuracy: 0.6007 Epoch 71/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7879 - accuracy: 0.6988 - val_loss: 1.1090 - val_accuracy: 0.6014 Epoch 72/250 202/202 [==============================] - 35s 171ms/step - loss: 0.7946 - accuracy: 0.7000 - val_loss: 1.1111 - val_accuracy: 0.6049 Epoch 73/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7867 - accuracy: 0.6986 - val_loss: 1.1116 - val_accuracy: 0.5951 Epoch 74/250 202/202 [==============================] - 35s 171ms/step - loss: 0.7829 - accuracy: 0.7030 - val_loss: 1.0958 - val_accuracy: 0.6035 Epoch 75/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7784 - accuracy: 0.7019 - val_loss: 1.0953 - val_accuracy: 0.5993 Epoch 76/250 202/202 [==============================] - 34s 170ms/step - loss: 0.7739 - accuracy: 0.7032 - val_loss: 1.1242 - val_accuracy: 0.6000 Epoch 77/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7755 - accuracy: 0.7062 - val_loss: 1.1054 - val_accuracy: 0.6042 Epoch 78/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7744 - accuracy: 0.7050 - val_loss: 1.1337 - val_accuracy: 0.6021 Epoch 79/250 202/202 [==============================] - 34s 170ms/step - loss: 0.7633 - accuracy: 0.7106 - val_loss: 1.1081 - val_accuracy: 0.6021 Epoch 80/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7557 - accuracy: 0.7178 - val_loss: 1.1140 - val_accuracy: 0.6105 Epoch 81/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7523 - accuracy: 0.7170 - val_loss: 1.1059 - val_accuracy: 0.6077 Epoch 82/250 202/202 
[==============================] - 34s 166ms/step - loss: 0.7563 - accuracy: 0.7125 - val_loss: 1.1066 - val_accuracy: 0.6042 Epoch 83/250 202/202 [==============================] - 34s 169ms/step - loss: 0.7506 - accuracy: 0.7148 - val_loss: 1.1168 - val_accuracy: 0.6021 Epoch 84/250 202/202 [==============================] - 38s 188ms/step - loss: 0.7541 - accuracy: 0.7130 - val_loss: 1.1192 - val_accuracy: 0.6042 Epoch 85/250 202/202 [==============================] - 42s 210ms/step - loss: 0.7486 - accuracy: 0.7129 - val_loss: 1.1122 - val_accuracy: 0.6077 Epoch 86/250 202/202 [==============================] - 49s 242ms/step - loss: 0.7509 - accuracy: 0.7164 - val_loss: 1.1242 - val_accuracy: 0.6035 Epoch 87/250 202/202 [==============================] - 43s 214ms/step - loss: 0.7521 - accuracy: 0.7132 - val_loss: 1.1166 - val_accuracy: 0.6021 Epoch 88/250 202/202 [==============================] - 43s 212ms/step - loss: 0.7385 - accuracy: 0.7220 - val_loss: 1.1190 - val_accuracy: 0.6070 Epoch 89/250 202/202 [==============================] - 40s 200ms/step - loss: 0.7378 - accuracy: 0.7206 - val_loss: 1.1131 - val_accuracy: 0.6049 Epoch 90/250 202/202 [==============================] - 45s 223ms/step - loss: 0.7400 - accuracy: 0.7201 - val_loss: 1.1194 - val_accuracy: 0.6007 Epoch 91/250 202/202 [==============================] - 44s 218ms/step - loss: 0.7414 - accuracy: 0.7172 - val_loss: 1.1150 - val_accuracy: 0.6063 Epoch 92/250 202/202 [==============================] - 45s 224ms/step - loss: 0.7399 - accuracy: 0.7182 - val_loss: 1.1195 - val_accuracy: 0.60560.7396 - accuracy: 0. 
Epoch 93/250 202/202 [==============================] - 43s 211ms/step - loss: 0.7371 - accuracy: 0.7244 - val_loss: 1.1204 - val_accuracy: 0.6063 Epoch 94/250 202/202 [==============================] - 39s 194ms/step - loss: 0.7356 - accuracy: 0.7244 - val_loss: 1.1176 - val_accuracy: 0.6049 Epoch 95/250 202/202 [==============================] - 43s 213ms/step - loss: 0.7341 - accuracy: 0.7197 - val_loss: 1.1153 - val_accuracy: 0.6077 Epoch 96/250 202/202 [==============================] - 46s 227ms/step - loss: 0.7380 - accuracy: 0.7215 - val_loss: 1.1142 - val_accuracy: 0.6063 Epoch 97/250 202/202 [==============================] - 41s 204ms/step - loss: 0.7348 - accuracy: 0.7249 - val_loss: 1.1169 - val_accuracy: 0.6063 Epoch 98/250 202/202 [==============================] - 39s 192ms/step - loss: 0.7353 - accuracy: 0.7226 - val_loss: 1.1171 - val_accuracy: 0.6084 Epoch 99/250 202/202 [==============================] - 39s 195ms/step - loss: 0.7306 - accuracy: 0.7237 - val_loss: 1.1196 - val_accuracy: 0.6077 Epoch 100/250 202/202 [==============================] - 39s 194ms/step - loss: 0.7305 - accuracy: 0.7223 - val_loss: 1.1187 - val_accuracy: 0.6077 Epoch 101/250 202/202 [==============================] - 38s 188ms/step - loss: 0.7337 - accuracy: 0.7253 - val_loss: 1.1180 - val_accuracy: 0.6098 Epoch 102/250 202/202 [==============================] - 41s 205ms/step - loss: 0.7301 - accuracy: 0.7239 - val_loss: 1.1183 - val_accuracy: 0.6084 Epoch 103/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7330 - accuracy: 0.7238 - val_loss: 1.1184 - val_accuracy: 0.6091 Epoch 104/250 202/202 [==============================] - 35s 175ms/step - loss: 0.7351 - accuracy: 0.7232 - val_loss: 1.1180 - val_accuracy: 0.6091 Epoch 105/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7299 - accuracy: 0.7275 - val_loss: 1.1184 - val_accuracy: 0.6091 Epoch 106/250 202/202 [==============================] - 37s 183ms/step - loss: 
0.7344 - accuracy: 0.7226 - val_loss: 1.1186 - val_accuracy: 0.6091 Epoch 107/250 202/202 [==============================] - 37s 184ms/step - loss: 0.7378 - accuracy: 0.7207 - val_loss: 1.1185 - val_accuracy: 0.6091 Epoch 108/250 202/202 [==============================] - 36s 179ms/step - loss: 0.7369 - accuracy: 0.7242 - val_loss: 1.1186 - val_accuracy: 0.6091 Epoch 109/250 202/202 [==============================] - 42s 210ms/step - loss: 0.7350 - accuracy: 0.7237 - val_loss: 1.1185 - val_accuracy: 0.6091 Epoch 110/250 202/202 [==============================] - 38s 188ms/step - loss: 0.7294 - accuracy: 0.7250 - val_loss: 1.1185 - val_accuracy: 0.6091 Epoch 111/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7314 - accuracy: 0.7235 - val_loss: 1.1185 - val_accuracy: 0.6091 Epoch 112/250 202/202 [==============================] - 35s 174ms/step - loss: 0.7324 - accuracy: 0.7248 - val_loss: 1.1185 - val_accuracy: 0.6091 Epoch 113/250 202/202 [==============================] - 37s 183ms/step - loss: 0.7285 - accuracy: 0.7252 - val_loss: 1.1185 - val_accuracy: 0.6091 Epoch 114/250 202/202 [==============================] - 36s 177ms/step - loss: 0.7331 - accuracy: 0.7220 - val_loss: 1.1185 - val_accuracy: 0.6091 Epoch 115/250 202/202 [==============================] - 48s 237ms/step - loss: 0.7298 - accuracy: 0.7226 - val_loss: 1.1186 - val_accuracy: 0.6091 Epoch 116/250 202/202 [==============================] - 43s 215ms/step - loss: 0.7293 - accuracy: 0.7233 - val_loss: 1.1186 - val_accuracy: 0.6091 Epoch 117/250 202/202 [==============================] - 53s 261ms/step - loss: 0.7333 - accuracy: 0.7269 - val_loss: 1.1186 - val_accuracy: 0.6091 Epoch 118/250 202/202 [==============================] - 65s 323ms/step - loss: 0.7381 - accuracy: 0.7228 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 119/250 202/202 [==============================] - 44s 217ms/step - loss: 0.7316 - accuracy: 0.7248 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 
120/250 202/202 [==============================] - 52s 259ms/step - loss: 0.7331 - accuracy: 0.7208 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 121/250 202/202 [==============================] - 44s 219ms/step - loss: 0.7297 - accuracy: 0.7269 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 122/250 202/202 [==============================] - 37s 182ms/step - loss: 0.7357 - accuracy: 0.7202 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 123/250 202/202 [==============================] - 36s 179ms/step - loss: 0.7351 - accuracy: 0.7230 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 124/250 202/202 [==============================] - 37s 181ms/step - loss: 0.7292 - accuracy: 0.7248 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 125/250 202/202 [==============================] - 37s 181ms/step - loss: 0.7326 - accuracy: 0.7236 - val_loss: 1.1186 - val_accuracy: 0.6091 Epoch 126/250 202/202 [==============================] - 37s 181ms/step - loss: 0.7279 - accuracy: 0.7259 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 127/250 202/202 [==============================] - 36s 180ms/step - loss: 0.7349 - accuracy: 0.7230 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 128/250 202/202 [==============================] - 36s 180ms/step - loss: 0.7307 - accuracy: 0.7261 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 129/250 202/202 [==============================] - 36s 181ms/step - loss: 0.7323 - accuracy: 0.7230 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 130/250 202/202 [==============================] - 37s 182ms/step - loss: 0.7423 - accuracy: 0.7196 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 131/250 202/202 [==============================] - 36s 179ms/step - loss: 0.7319 - accuracy: 0.7241 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 132/250 202/202 [==============================] - 37s 181ms/step - loss: 0.7296 - accuracy: 0.7258 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 133/250 202/202 [==============================] - 36s 179ms/step - loss: 
0.7326 - accuracy: 0.7231 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 134/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7352 - accuracy: 0.7241 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 135/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7326 - accuracy: 0.7199 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 136/250 202/202 [==============================] - 36s 180ms/step - loss: 0.7311 - accuracy: 0.7243 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 137/250 202/202 [==============================] - 37s 184ms/step - loss: 0.7277 - accuracy: 0.7236 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 138/250 202/202 [==============================] - 48s 240ms/step - loss: 0.7304 - accuracy: 0.7265 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 139/250 202/202 [==============================] - 47s 234ms/step - loss: 0.7328 - accuracy: 0.7250 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 140/250 202/202 [==============================] - 47s 232ms/step - loss: 0.7371 - accuracy: 0.7236 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 141/250 202/202 [==============================] - 47s 232ms/step - loss: 0.7332 - accuracy: 0.7197 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 142/250 202/202 [==============================] - 49s 243ms/step - loss: 0.7284 - accuracy: 0.7232 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 143/250 202/202 [==============================] - 48s 237ms/step - loss: 0.7337 - accuracy: 0.7245 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 144/250 202/202 [==============================] - 44s 219ms/step - loss: 0.7286 - accuracy: 0.7254 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 145/250 202/202 [==============================] - 38s 186ms/step - loss: 0.7277 - accuracy: 0.7250 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 146/250 202/202 [==============================] - 44s 218ms/step - loss: 0.7289 - accuracy: 0.7257 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 
147/250 202/202 [==============================] - 41s 202ms/step - loss: 0.7313 - accuracy: 0.7206 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 148/250 202/202 [==============================] - 42s 206ms/step - loss: 0.7353 - accuracy: 0.7216 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 149/250 202/202 [==============================] - 41s 201ms/step - loss: 0.7353 - accuracy: 0.7223 - val_loss: 1.1187 - val_accuracy: 0.6091 Epoch 150/250 202/202 [==============================] - 41s 204ms/step - loss: 0.7298 - accuracy: 0.7264 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 151/250 202/202 [==============================] - 39s 192ms/step - loss: 0.7314 - accuracy: 0.7216 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 152/250 202/202 [==============================] - 41s 204ms/step - loss: 0.7324 - accuracy: 0.7285 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 153/250 202/202 [==============================] - 57s 283ms/step - loss: 0.7310 - accuracy: 0.7240 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 154/250 202/202 [==============================] - 69s 339ms/step - loss: 0.7303 - accuracy: 0.7230 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 155/250 202/202 [==============================] - 62s 305ms/step - loss: 0.7299 - accuracy: 0.7219 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 156/250 202/202 [==============================] - 68s 336ms/step - loss: 0.7305 - accuracy: 0.7253 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 157/250 202/202 [==============================] - 96s 474ms/step - loss: 0.7271 - accuracy: 0.7261 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 158/250 202/202 [==============================] - 90s 448ms/step - loss: 0.7324 - accuracy: 0.7262 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 159/250 202/202 [==============================] - 64s 319ms/step - loss: 0.7352 - accuracy: 0.7240 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 160/250 202/202 [==============================] - 36s 178ms/step - loss: 
0.7347 - accuracy: 0.7250 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 161/250 202/202 [==============================] - 36s 177ms/step - loss: 0.7222 - accuracy: 0.7285 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 162/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7352 - accuracy: 0.7245 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 163/250 202/202 [==============================] - 36s 176ms/step - loss: 0.7388 - accuracy: 0.7199 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 164/250 202/202 [==============================] - 36s 177ms/step - loss: 0.7303 - accuracy: 0.7254 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 165/250 202/202 [==============================] - 36s 179ms/step - loss: 0.7301 - accuracy: 0.7206 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 166/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7283 - accuracy: 0.7233 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 167/250 202/202 [==============================] - 36s 177ms/step - loss: 0.7319 - accuracy: 0.7252 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 168/250 202/202 [==============================] - 36s 179ms/step - loss: 0.7387 - accuracy: 0.7185 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 169/250 202/202 [==============================] - 36s 176ms/step - loss: 0.7366 - accuracy: 0.7225 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 170/250 202/202 [==============================] - 35s 175ms/step - loss: 0.7308 - accuracy: 0.7253 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 171/250 202/202 [==============================] - 35s 174ms/step - loss: 0.7261 - accuracy: 0.7281 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 172/250 202/202 [==============================] - 36s 176ms/step - loss: 0.7311 - accuracy: 0.7247 - val_loss: 1.1190 - val_accuracy: 0.6091 Epoch 173/250 202/202 [==============================] - 35s 174ms/step - loss: 0.7326 - accuracy: 0.7226 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 
174/250 202/202 [==============================] - 35s 174ms/step - loss: 0.7315 - accuracy: 0.7248 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 175/250 202/202 [==============================] - 35s 176ms/step - loss: 0.7303 - accuracy: 0.7225 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 176/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7304 - accuracy: 0.7265 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 177/250 202/202 [==============================] - 36s 177ms/step - loss: 0.7373 - accuracy: 0.7267 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 178/250 202/202 [==============================] - 35s 174ms/step - loss: 0.7262 - accuracy: 0.7279 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 179/250 202/202 [==============================] - 35s 175ms/step - loss: 0.7293 - accuracy: 0.7274 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 180/250 202/202 [==============================] - 35s 174ms/step - loss: 0.7305 - accuracy: 0.7244 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 181/250 202/202 [==============================] - 35s 173ms/step - loss: 0.7355 - accuracy: 0.7221 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 182/250 202/202 [==============================] - 35s 176ms/step - loss: 0.7353 - accuracy: 0.7219 - val_loss: 1.1187 - val_accuracy: 0.6084 Epoch 183/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7281 - accuracy: 0.7249 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 184/250 202/202 [==============================] - 35s 175ms/step - loss: 0.7318 - accuracy: 0.7244 - val_loss: 1.1187 - val_accuracy: 0.6084 Epoch 185/250 202/202 [==============================] - 35s 172ms/step - loss: 0.7268 - accuracy: 0.7231 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 186/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7326 - accuracy: 0.7271 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 187/250 202/202 [==============================] - 33s 166ms/step - loss: 
0.7344 - accuracy: 0.7261 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 188/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7358 - accuracy: 0.7203 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 189/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7280 - accuracy: 0.7214 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 190/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7282 - accuracy: 0.7262 - val_loss: 1.1188 - val_accuracy: 0.6091 Epoch 191/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7282 - accuracy: 0.7274 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 192/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7343 - accuracy: 0.7260 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 193/250 202/202 [==============================] - 34s 169ms/step - loss: 0.7295 - accuracy: 0.7233 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 194/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7321 - accuracy: 0.7261 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 195/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7285 - accuracy: 0.7245 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 196/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7357 - accuracy: 0.7222 - val_loss: 1.1189 - val_accuracy: 0.6091 Epoch 197/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7342 - accuracy: 0.7239 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 198/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7317 - accuracy: 0.7261 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 199/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7292 - accuracy: 0.7266 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 200/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7264 - accuracy: 0.7247 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 
201/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7323 - accuracy: 0.7252 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 202/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7289 - accuracy: 0.7233 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 203/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7316 - accuracy: 0.7249 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 204/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7375 - accuracy: 0.7208 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 205/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7326 - accuracy: 0.7221 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 206/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7315 - accuracy: 0.7261 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 207/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7283 - accuracy: 0.7299 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 208/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7412 - accuracy: 0.7243 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 209/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7399 - accuracy: 0.7218 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 210/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7303 - accuracy: 0.7237 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 211/250 202/202 [==============================] - 34s 169ms/step - loss: 0.7333 - accuracy: 0.7239 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 212/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7313 - accuracy: 0.7233 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 213/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7304 - accuracy: 0.7233 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 214/250 202/202 [==============================] - 34s 167ms/step - loss: 
0.7321 - accuracy: 0.7224 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 215/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7302 - accuracy: 0.7237 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 216/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7317 - accuracy: 0.7268 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 217/250 202/202 [==============================] - 34s 169ms/step - loss: 0.7332 - accuracy: 0.7246 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 218/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7361 - accuracy: 0.7228 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 219/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7330 - accuracy: 0.7240 - val_loss: 1.1190 - val_accuracy: 0.6077 Epoch 220/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7318 - accuracy: 0.7254 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 221/250 202/202 [==============================] - 34s 166ms/step - loss: 0.7287 - accuracy: 0.7277 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 222/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7336 - accuracy: 0.7239 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 223/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7312 - accuracy: 0.7277 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 224/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7325 - accuracy: 0.7258 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 225/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7313 - accuracy: 0.7259 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 226/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7333 - accuracy: 0.7242 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 227/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7310 - accuracy: 0.7241 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 
228/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7354 - accuracy: 0.7238 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 229/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7259 - accuracy: 0.7280 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 230/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7310 - accuracy: 0.7242 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 231/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7273 - accuracy: 0.7256 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 232/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7369 - accuracy: 0.7226 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 233/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7337 - accuracy: 0.7247 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 234/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7337 - accuracy: 0.7216 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 235/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7323 - accuracy: 0.7261 - val_loss: 1.1187 - val_accuracy: 0.6077 Epoch 236/250 202/202 [==============================] - 36s 178ms/step - loss: 0.7276 - accuracy: 0.7254 - val_loss: 1.1187 - val_accuracy: 0.6077 Epoch 237/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7228 - accuracy: 0.7264 - val_loss: 1.1187 - val_accuracy: 0.6077 Epoch 238/250 202/202 [==============================] - 33s 166ms/step - loss: 0.7320 - accuracy: 0.7242 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 239/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7266 - accuracy: 0.7294 - val_loss: 1.1189 - val_accuracy: 0.6084 Epoch 240/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7348 - accuracy: 0.7230 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 241/250 202/202 [==============================] - 33s 165ms/step - loss: 
0.7307 - accuracy: 0.7220 - val_loss: 1.1188 - val_accuracy: 0.6084 Epoch 242/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7278 - accuracy: 0.7259 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 243/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7313 - accuracy: 0.7247 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 244/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7327 - accuracy: 0.7240 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 245/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7312 - accuracy: 0.7270 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 246/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7289 - accuracy: 0.7239 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 247/250 202/202 [==============================] - 34s 168ms/step - loss: 0.7340 - accuracy: 0.7242 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 248/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7293 - accuracy: 0.7256 - val_loss: 1.1189 - val_accuracy: 0.6077 Epoch 249/250 202/202 [==============================] - 33s 165ms/step - loss: 0.7264 - accuracy: 0.7277 - val_loss: 1.1188 - val_accuracy: 0.6077 Epoch 250/250 202/202 [==============================] - 34s 167ms/step - loss: 0.7299 - accuracy: 0.7268 - val_loss: 1.1188 - val_accuracy: 0.6077
# Plot Training History
# Side-by-side curves of model_1_e's loss (left) and accuracy (right) on the
# training and validation ("testing") splits, one point per epoch.
plt.style.use('fivethirtyeight')
train_acc = history_1_e.history['accuracy']
train_loss = history_1_e.history['loss']
test_acc = history_1_e.history['val_accuracy']
test_loss = history_1_e.history['val_loss']
# Derive the x-axis from the recorded history instead of hard-coding 250 so
# the plot stays correct if training is interrupted or the epoch count changes.
epochs = range(len(train_loss))
fig, ax = plt.subplots(1, 2)
fig.set_size_inches(20, 6)
ax[0].plot(epochs, train_loss, label='Training Loss')
ax[0].plot(epochs, test_loss, label='Testing Loss')
ax[0].set_title('Training & Testing Loss')
ax[0].legend()
ax[0].set_xlabel("Epochs")
ax[1].plot(epochs, train_acc, label='Training Accuracy')
ax[1].plot(epochs, test_acc, label='Testing Accuracy')
ax[1].set_title('Training & Testing Accuracy')
ax[1].legend()
ax[1].set_xlabel("Epochs")
plt.show()
# Save Model
# Persist the trained network to disk (HDF5 format) for later reloading.
model_path = 'Trained Models/model_1_e.h5'
model_1_e.save(model_path)
# Prepare Data
# Convert the feature splits to numpy arrays, append a trailing channel axis
# (samples, features, 1) as the downstream layers expect, and one-hot encode
# the labels with to_categorical.
x_train, x_test, y_train, y_test = X_train, X_test, y1_train, y1_test
x_train = np.array(x_train)
y_train = np.array(y_train)
x_test = np.array(x_test)
y_test = np.array(y_test)
# Infer the per-sample feature count from the data rather than hard-coding 13
# (presumably 13 MFCC coefficients — same value for this dataset, but this
# generalizes to other feature extractors without edits).
size = x_train.shape[1]
x_train = x_train.reshape(x_train.shape[0], size, 1)
x_test = x_test.reshape(x_test.shape[0], size, 1)
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
# Create Model
from tensorflow.keras.layers import LSTM, GRU, Dense, Embedding, Dropout, GlobalAveragePooling1D, Flatten, SpatialDropout1D, Bidirectional
# Fully-connected funnel (256 -> 8 units, ReLU) applied per feature step,
# then flattened into a 2-way softmax classifier. Same architecture as the
# original cell, built by iterating over the layer widths.
hidden_units = [256, 128, 64, 32, 16, 8]
lstm_1_g = Sequential()
for idx, units in enumerate(hidden_units):
    if idx == 0:
        # First layer declares the input shape: (features, 1 channel).
        lstm_1_g.add(Dense(units, activation='relu', input_shape=(x_train.shape[1], 1)))
    else:
        lstm_1_g.add(Dense(units, activation='relu'))
lstm_1_g.add(Flatten())
lstm_1_g.add(Dense(2, activation='softmax'))
lstm_1_g.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
lstm_1_g.summary()
Model: "sequential_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_4 (Dense) (None, 13, 256) 512 _________________________________________________________________ dense_5 (Dense) (None, 13, 128) 32896 _________________________________________________________________ dense_6 (Dense) (None, 13, 64) 8256 _________________________________________________________________ dense_7 (Dense) (None, 13, 32) 2080 _________________________________________________________________ dense_8 (Dense) (None, 13, 16) 528 _________________________________________________________________ dense_9 (Dense) (None, 13, 8) 136 _________________________________________________________________ flatten_2 (Flatten) (None, 104) 0 _________________________________________________________________ dense_10 (Dense) (None, 2) 210 ================================================================= Total params: 44,618 Trainable params: 44,618 Non-trainable params: 0 _________________________________________________________________
# Train for 250 epochs in batches of 64; the last 20% of the training data is
# held out as the validation split reported in the log below.
history_2_g = lstm_1_g.fit(x_train, y_train, epochs=250, batch_size=64, validation_split=0.2)
Epoch 1/250 162/162 [==============================] - 4s 27ms/step - loss: 0.6324 - accuracy: 0.6241 - val_loss: 0.5345 - val_accuracy: 0.7147 Epoch 2/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4967 - accuracy: 0.7427 - val_loss: 0.5070 - val_accuracy: 0.7387 Epoch 3/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4776 - accuracy: 0.7548 - val_loss: 0.4952 - val_accuracy: 0.7433 Epoch 4/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4692 - accuracy: 0.7640 - val_loss: 0.5025 - val_accuracy: 0.7507 Epoch 5/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4661 - accuracy: 0.7692 - val_loss: 0.4851 - val_accuracy: 0.7584 Epoch 6/250 162/162 [==============================] - 4s 27ms/step - loss: 0.4570 - accuracy: 0.7749 - val_loss: 0.4729 - val_accuracy: 0.7573 Epoch 7/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4532 - accuracy: 0.7798 - val_loss: 0.4663 - val_accuracy: 0.7642 Epoch 8/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4501 - accuracy: 0.7831 - val_loss: 0.4701 - val_accuracy: 0.7716 Epoch 9/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4488 - accuracy: 0.7814 - val_loss: 0.4693 - val_accuracy: 0.7727 Epoch 10/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4437 - accuracy: 0.7859 - val_loss: 0.4575 - val_accuracy: 0.7774 Epoch 11/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4408 - accuracy: 0.7870 - val_loss: 0.4701 - val_accuracy: 0.7600 Epoch 12/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4368 - accuracy: 0.7912 - val_loss: 0.4550 - val_accuracy: 0.7758 Epoch 13/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4375 - accuracy: 0.7913 - val_loss: 0.4509 - val_accuracy: 0.7762 Epoch 14/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4357 - accuracy: 0.7907 - val_loss: 0.4444 - 
val_accuracy: 0.7805 Epoch 15/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4331 - accuracy: 0.7906 - val_loss: 0.4447 - val_accuracy: 0.7801 Epoch 16/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4313 - accuracy: 0.7938 - val_loss: 0.4464 - val_accuracy: 0.7805 Epoch 17/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4306 - accuracy: 0.7929 - val_loss: 0.4563 - val_accuracy: 0.7743 Epoch 18/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4257 - accuracy: 0.7953 - val_loss: 0.4409 - val_accuracy: 0.7758 Epoch 19/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4275 - accuracy: 0.7929 - val_loss: 0.4348 - val_accuracy: 0.7840 Epoch 20/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4238 - accuracy: 0.7967 - val_loss: 0.4501 - val_accuracy: 0.7844 Epoch 21/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4217 - accuracy: 0.7968 - val_loss: 0.4417 - val_accuracy: 0.7801 Epoch 22/250 162/162 [==============================] - 4s 27ms/step - loss: 0.4213 - accuracy: 0.7960 - val_loss: 0.4334 - val_accuracy: 0.7782 Epoch 23/250 162/162 [==============================] - 4s 27ms/step - loss: 0.4201 - accuracy: 0.7968 - val_loss: 0.4401 - val_accuracy: 0.7836 Epoch 24/250 162/162 [==============================] - 4s 27ms/step - loss: 0.4214 - accuracy: 0.7961 - val_loss: 0.4445 - val_accuracy: 0.7782 Epoch 25/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4196 - accuracy: 0.7976 - val_loss: 0.4317 - val_accuracy: 0.7789 Epoch 26/250 162/162 [==============================] - 4s 27ms/step - loss: 0.4180 - accuracy: 0.7982 - val_loss: 0.4411 - val_accuracy: 0.7816 Epoch 27/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4180 - accuracy: 0.8003 - val_loss: 0.4348 - val_accuracy: 0.7863 Epoch 28/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4157 - 
accuracy: 0.7989 - val_loss: 0.4284 - val_accuracy: 0.7871 Epoch 29/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4129 - accuracy: 0.8019 - val_loss: 0.4353 - val_accuracy: 0.7875 Epoch 30/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4162 - accuracy: 0.8003 - val_loss: 0.4286 - val_accuracy: 0.7844 Epoch 31/250 162/162 [==============================] - 4s 27ms/step - loss: 0.4157 - accuracy: 0.7967 - val_loss: 0.4300 - val_accuracy: 0.7844 Epoch 32/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4126 - accuracy: 0.8027 - val_loss: 0.4431 - val_accuracy: 0.7782 Epoch 33/250 162/162 [==============================] - 4s 27ms/step - loss: 0.4119 - accuracy: 0.8002 - val_loss: 0.4278 - val_accuracy: 0.7906 Epoch 34/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4093 - accuracy: 0.8036 - val_loss: 0.4288 - val_accuracy: 0.7886 Epoch 35/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4086 - accuracy: 0.8038 - val_loss: 0.4232 - val_accuracy: 0.7898 Epoch 36/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4121 - accuracy: 0.8008 - val_loss: 0.4419 - val_accuracy: 0.7789 Epoch 37/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4103 - accuracy: 0.8008 - val_loss: 0.4306 - val_accuracy: 0.7894 Epoch 38/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4107 - accuracy: 0.8044 - val_loss: 0.4235 - val_accuracy: 0.7917 Epoch 39/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4084 - accuracy: 0.8033 - val_loss: 0.4294 - val_accuracy: 0.7851 Epoch 40/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4075 - accuracy: 0.8027 - val_loss: 0.4302 - val_accuracy: 0.7824 Epoch 41/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4119 - accuracy: 0.7998 - val_loss: 0.4222 - val_accuracy: 0.7909 Epoch 42/250 162/162 [==============================] - 
4s 26ms/step - loss: 0.4075 - accuracy: 0.8034 - val_loss: 0.4258 - val_accuracy: 0.7902 Epoch 43/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4112 - accuracy: 0.8040 - val_loss: 0.4244 - val_accuracy: 0.7886 Epoch 44/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4076 - accuracy: 0.8039 - val_loss: 0.4334 - val_accuracy: 0.7828 Epoch 45/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4057 - accuracy: 0.8062 - val_loss: 0.4207 - val_accuracy: 0.7890 Epoch 46/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4018 - accuracy: 0.8097 - val_loss: 0.4269 - val_accuracy: 0.7890 Epoch 47/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4058 - accuracy: 0.8027 - val_loss: 0.4257 - val_accuracy: 0.7933 Epoch 48/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4000 - accuracy: 0.8106 - val_loss: 0.4251 - val_accuracy: 0.7855 Epoch 49/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4071 - accuracy: 0.8025 - val_loss: 0.4520 - val_accuracy: 0.7704 Epoch 50/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4040 - accuracy: 0.8067 - val_loss: 0.4238 - val_accuracy: 0.7940 Epoch 51/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4001 - accuracy: 0.8074 - val_loss: 0.4195 - val_accuracy: 0.7886 Epoch 52/250 162/162 [==============================] - 4s 27ms/step - loss: 0.4037 - accuracy: 0.8084 - val_loss: 0.4175 - val_accuracy: 0.7902 Epoch 53/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4039 - accuracy: 0.8070 - val_loss: 0.4241 - val_accuracy: 0.7836 Epoch 54/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3991 - accuracy: 0.8112 - val_loss: 0.4267 - val_accuracy: 0.7998 Epoch 55/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4007 - accuracy: 0.8081 - val_loss: 0.4151 - val_accuracy: 0.7964 Epoch 56/250 162/162 
[==============================] - 4s 26ms/step - loss: 0.3973 - accuracy: 0.8106 - val_loss: 0.4164 - val_accuracy: 0.7960 Epoch 57/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3985 - accuracy: 0.8105 - val_loss: 0.4143 - val_accuracy: 0.7987 Epoch 58/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4025 - accuracy: 0.8071 - val_loss: 0.4205 - val_accuracy: 0.7952 Epoch 59/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3990 - accuracy: 0.8083 - val_loss: 0.4245 - val_accuracy: 0.7863 Epoch 60/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3965 - accuracy: 0.8101 - val_loss: 0.4198 - val_accuracy: 0.7983 Epoch 61/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3955 - accuracy: 0.8139 - val_loss: 0.4154 - val_accuracy: 0.7933 Epoch 62/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3963 - accuracy: 0.8117 - val_loss: 0.4095 - val_accuracy: 0.7975 Epoch 63/250 162/162 [==============================] - 4s 26ms/step - loss: 0.4003 - accuracy: 0.8075 - val_loss: 0.4083 - val_accuracy: 0.8026 Epoch 64/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3947 - accuracy: 0.8136 - val_loss: 0.4152 - val_accuracy: 0.7983 Epoch 65/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3999 - accuracy: 0.8086 - val_loss: 0.4078 - val_accuracy: 0.8002 Epoch 66/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3960 - accuracy: 0.8131 - val_loss: 0.4125 - val_accuracy: 0.7948 Epoch 67/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3950 - accuracy: 0.8120 - val_loss: 0.4120 - val_accuracy: 0.7925 Epoch 68/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3935 - accuracy: 0.8129 - val_loss: 0.4799 - val_accuracy: 0.7511 Epoch 69/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3991 - accuracy: 0.8086 - val_loss: 0.4083 - 
val_accuracy: 0.7998 Epoch 70/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3904 - accuracy: 0.8158 - val_loss: 0.4143 - val_accuracy: 0.8018 Epoch 71/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3960 - accuracy: 0.8104 - val_loss: 0.4040 - val_accuracy: 0.8033 Epoch 72/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3934 - accuracy: 0.8123 - val_loss: 0.4054 - val_accuracy: 0.7975 Epoch 73/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3918 - accuracy: 0.8141 - val_loss: 0.4080 - val_accuracy: 0.7991 Epoch 74/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3963 - accuracy: 0.8087 - val_loss: 0.4072 - val_accuracy: 0.8041 Epoch 75/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3940 - accuracy: 0.8123 - val_loss: 0.4069 - val_accuracy: 0.8041 Epoch 76/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3927 - accuracy: 0.8134 - val_loss: 0.4086 - val_accuracy: 0.8037 Epoch 77/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3947 - accuracy: 0.8107 - val_loss: 0.4236 - val_accuracy: 0.7971 Epoch 78/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3949 - accuracy: 0.8105 - val_loss: 0.4249 - val_accuracy: 0.7859 Epoch 79/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3930 - accuracy: 0.8156 - val_loss: 0.4347 - val_accuracy: 0.7917 Epoch 80/250 162/162 [==============================] - 4s 28ms/step - loss: 0.3992 - accuracy: 0.8119 - val_loss: 0.4057 - val_accuracy: 0.8010 Epoch 81/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3914 - accuracy: 0.8164 - val_loss: 0.4070 - val_accuracy: 0.8037 Epoch 82/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3889 - accuracy: 0.8152 - val_loss: 0.4214 - val_accuracy: 0.7925 Epoch 83/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3917 - 
accuracy: 0.8168 - val_loss: 0.4013 - val_accuracy: 0.8026 Epoch 84/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3951 - accuracy: 0.8135 - val_loss: 0.4014 - val_accuracy: 0.8018 Epoch 85/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3919 - accuracy: 0.8168 - val_loss: 0.4062 - val_accuracy: 0.8002 Epoch 86/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3924 - accuracy: 0.8134 - val_loss: 0.4167 - val_accuracy: 0.7929 Epoch 87/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3902 - accuracy: 0.8158 - val_loss: 0.4051 - val_accuracy: 0.8006 Epoch 88/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3907 - accuracy: 0.8176 - val_loss: 0.4224 - val_accuracy: 0.7933 Epoch 89/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3941 - accuracy: 0.8148 - val_loss: 0.4324 - val_accuracy: 0.7890 Epoch 90/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3984 - accuracy: 0.8109 - val_loss: 0.4727 - val_accuracy: 0.7782 Epoch 91/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3965 - accuracy: 0.8128 - val_loss: 0.4243 - val_accuracy: 0.7898 Epoch 92/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3964 - accuracy: 0.8084 - val_loss: 0.4148 - val_accuracy: 0.7956 Epoch 93/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3888 - accuracy: 0.8186 - val_loss: 0.4162 - val_accuracy: 0.7909 Epoch 94/250 162/162 [==============================] - 5s 30ms/step - loss: 0.3880 - accuracy: 0.8195 - val_loss: 0.4005 - val_accuracy: 0.8060 Epoch 95/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3887 - accuracy: 0.8174 - val_loss: 0.4142 - val_accuracy: 0.7940 Epoch 96/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3855 - accuracy: 0.8196 - val_loss: 0.4118 - val_accuracy: 0.7967 Epoch 97/250 162/162 [==============================] - 
4s 26ms/step - loss: 0.3966 - accuracy: 0.8099 - val_loss: 0.4057 - val_accuracy: 0.8049 Epoch 98/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3861 - accuracy: 0.8191 - val_loss: 0.4177 - val_accuracy: 0.8002 Epoch 99/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3962 - accuracy: 0.8096 - val_loss: 0.4136 - val_accuracy: 0.7956 Epoch 100/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3886 - accuracy: 0.8161 - val_loss: 0.4195 - val_accuracy: 0.7906 Epoch 101/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3925 - accuracy: 0.8165 - val_loss: 0.4016 - val_accuracy: 0.8026 Epoch 102/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3906 - accuracy: 0.8172 - val_loss: 0.4162 - val_accuracy: 0.8006 Epoch 103/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3883 - accuracy: 0.8192 - val_loss: 0.4019 - val_accuracy: 0.8057 Epoch 104/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3889 - accuracy: 0.8159 - val_loss: 0.4138 - val_accuracy: 0.7967 Epoch 105/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3883 - accuracy: 0.8139 - val_loss: 0.4088 - val_accuracy: 0.8041 Epoch 106/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3880 - accuracy: 0.8186 - val_loss: 0.4095 - val_accuracy: 0.8076 Epoch 107/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3912 - accuracy: 0.8126 - val_loss: 0.4268 - val_accuracy: 0.7882 Epoch 108/250 162/162 [==============================] - 4s 28ms/step - loss: 0.3892 - accuracy: 0.8199 - val_loss: 0.4015 - val_accuracy: 0.8014 Epoch 109/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3863 - accuracy: 0.8218 - val_loss: 0.4062 - val_accuracy: 0.8033 Epoch 110/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3866 - accuracy: 0.8176 - val_loss: 0.4243 - val_accuracy: 0.7902 Epoch 111/250 
162/162 [==============================] - 4s 26ms/step - loss: 0.3881 - accuracy: 0.8174 - val_loss: 0.4131 - val_accuracy: 0.8045 Epoch 112/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3860 - accuracy: 0.8225 - val_loss: 0.4104 - val_accuracy: 0.8022 Epoch 113/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3901 - accuracy: 0.8160 - val_loss: 0.4088 - val_accuracy: 0.7987 Epoch 114/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3867 - accuracy: 0.8190 - val_loss: 0.4141 - val_accuracy: 0.8006 Epoch 115/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3873 - accuracy: 0.8200 - val_loss: 0.4016 - val_accuracy: 0.8026 Epoch 116/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3901 - accuracy: 0.8186 - val_loss: 0.4182 - val_accuracy: 0.7944 Epoch 117/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3880 - accuracy: 0.8206 - val_loss: 0.4037 - val_accuracy: 0.8037 Epoch 118/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3875 - accuracy: 0.8194 - val_loss: 0.3996 - val_accuracy: 0.8060 Epoch 119/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3876 - accuracy: 0.8157 - val_loss: 0.3996 - val_accuracy: 0.8068 Epoch 120/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3882 - accuracy: 0.8156 - val_loss: 0.4022 - val_accuracy: 0.8014 Epoch 121/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3863 - accuracy: 0.8185 - val_loss: 0.4019 - val_accuracy: 0.8049 Epoch 122/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3874 - accuracy: 0.8190 - val_loss: 0.4050 - val_accuracy: 0.8057 Epoch 123/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3883 - accuracy: 0.8181 - val_loss: 0.4029 - val_accuracy: 0.8049 Epoch 124/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3895 - accuracy: 0.8164 - val_loss: 
0.4010 - val_accuracy: 0.8018 Epoch 125/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3896 - accuracy: 0.8164 - val_loss: 0.4034 - val_accuracy: 0.8037 Epoch 126/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3837 - accuracy: 0.8206 - val_loss: 0.4147 - val_accuracy: 0.8049 Epoch 127/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3870 - accuracy: 0.8199 - val_loss: 0.3977 - val_accuracy: 0.8068 Epoch 128/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3835 - accuracy: 0.8220 - val_loss: 0.3957 - val_accuracy: 0.8057 Epoch 129/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3886 - accuracy: 0.8189 - val_loss: 0.4060 - val_accuracy: 0.8041 Epoch 130/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3863 - accuracy: 0.8185 - val_loss: 0.4068 - val_accuracy: 0.8006 Epoch 131/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3870 - accuracy: 0.8188 - val_loss: 0.4063 - val_accuracy: 0.8014 Epoch 132/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3856 - accuracy: 0.8228 - val_loss: 0.4647 - val_accuracy: 0.7716 Epoch 133/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3862 - accuracy: 0.8192 - val_loss: 0.3964 - val_accuracy: 0.8057 Epoch 134/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3819 - accuracy: 0.8231 - val_loss: 0.3994 - val_accuracy: 0.8107 Epoch 135/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3874 - accuracy: 0.8150 - val_loss: 0.4150 - val_accuracy: 0.8002 Epoch 136/250 162/162 [==============================] - 4s 28ms/step - loss: 0.3839 - accuracy: 0.8221 - val_loss: 0.3995 - val_accuracy: 0.8060 Epoch 137/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3852 - accuracy: 0.8175 - val_loss: 0.4168 - val_accuracy: 0.7964 Epoch 138/250 162/162 [==============================] - 4s 26ms/step - 
loss: 0.3847 - accuracy: 0.8212 - val_loss: 0.4125 - val_accuracy: 0.7929 Epoch 139/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3862 - accuracy: 0.8182 - val_loss: 0.3982 - val_accuracy: 0.8111 Epoch 140/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3852 - accuracy: 0.8228 - val_loss: 0.4015 - val_accuracy: 0.8068 Epoch 141/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3866 - accuracy: 0.8211 - val_loss: 0.3996 - val_accuracy: 0.8057 Epoch 142/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3894 - accuracy: 0.8168 - val_loss: 0.3996 - val_accuracy: 0.8087 Epoch 143/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3835 - accuracy: 0.8222 - val_loss: 0.3975 - val_accuracy: 0.8076 Epoch 144/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3854 - accuracy: 0.8207 - val_loss: 0.4015 - val_accuracy: 0.8072 Epoch 145/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3821 - accuracy: 0.8245 - val_loss: 0.4040 - val_accuracy: 0.8080 Epoch 146/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3816 - accuracy: 0.8237 - val_loss: 0.4016 - val_accuracy: 0.8130 Epoch 147/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3832 - accuracy: 0.8216 - val_loss: 0.4053 - val_accuracy: 0.8087 Epoch 148/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3860 - accuracy: 0.8202 - val_loss: 0.4008 - val_accuracy: 0.8053 Epoch 149/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3856 - accuracy: 0.8219 - val_loss: 0.4006 - val_accuracy: 0.8091 Epoch 150/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3885 - accuracy: 0.8199 - val_loss: 0.4390 - val_accuracy: 0.7840 Epoch 151/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3875 - accuracy: 0.8175 - val_loss: 0.4416 - val_accuracy: 0.7801 Epoch 152/250 162/162 
[==============================] - 4s 26ms/step - loss: 0.3857 - accuracy: 0.8187 - val_loss: 0.4127 - val_accuracy: 0.7956 Epoch 153/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3905 - accuracy: 0.8166 - val_loss: 0.4128 - val_accuracy: 0.7979 Epoch 154/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3845 - accuracy: 0.8206 - val_loss: 0.4064 - val_accuracy: 0.8010 Epoch 155/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3847 - accuracy: 0.8209 - val_loss: 0.4100 - val_accuracy: 0.8022 Epoch 156/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3847 - accuracy: 0.8213 - val_loss: 0.4073 - val_accuracy: 0.8115 Epoch 157/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3868 - accuracy: 0.8174 - val_loss: 0.4090 - val_accuracy: 0.8057 Epoch 158/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3863 - accuracy: 0.8183 - val_loss: 0.3993 - val_accuracy: 0.8018 Epoch 159/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3855 - accuracy: 0.8199 - val_loss: 0.4265 - val_accuracy: 0.7952 Epoch 160/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3877 - accuracy: 0.8205 - val_loss: 0.3984 - val_accuracy: 0.8049 Epoch 161/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3890 - accuracy: 0.8228 - val_loss: 0.4074 - val_accuracy: 0.8026 Epoch 162/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3840 - accuracy: 0.8202 - val_loss: 0.3998 - val_accuracy: 0.8033 Epoch 163/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3856 - accuracy: 0.8183 - val_loss: 0.4067 - val_accuracy: 0.8022 Epoch 164/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3926 - accuracy: 0.8143 - val_loss: 0.3997 - val_accuracy: 0.8057 Epoch 165/250 162/162 [==============================] - 4s 28ms/step - loss: 0.3840 - accuracy: 0.8205 - val_loss: 0.4043 
- val_accuracy: 0.8068 Epoch 166/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3853 - accuracy: 0.8208 - val_loss: 0.4020 - val_accuracy: 0.8014 Epoch 167/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3852 - accuracy: 0.8149 - val_loss: 0.3946 - val_accuracy: 0.8084 Epoch 168/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3836 - accuracy: 0.8196 - val_loss: 0.3986 - val_accuracy: 0.8060 Epoch 169/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3830 - accuracy: 0.8207 - val_loss: 0.4153 - val_accuracy: 0.7967 Epoch 170/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3835 - accuracy: 0.8222 - val_loss: 0.4008 - val_accuracy: 0.8060 Epoch 171/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3823 - accuracy: 0.8222 - val_loss: 0.4058 - val_accuracy: 0.7991 Epoch 172/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3838 - accuracy: 0.8226 - val_loss: 0.4121 - val_accuracy: 0.8022 Epoch 173/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3818 - accuracy: 0.8243 - val_loss: 0.4097 - val_accuracy: 0.8068 Epoch 174/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3852 - accuracy: 0.8227 - val_loss: 0.3965 - val_accuracy: 0.8068 Epoch 175/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3852 - accuracy: 0.8192 - val_loss: 0.4027 - val_accuracy: 0.8057 Epoch 176/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3891 - accuracy: 0.8148 - val_loss: 0.4072 - val_accuracy: 0.8064 Epoch 177/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3915 - accuracy: 0.8118 - val_loss: 0.4297 - val_accuracy: 0.7890 Epoch 178/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3824 - accuracy: 0.8256 - val_loss: 0.4325 - val_accuracy: 0.7878 Epoch 179/250 162/162 [==============================] - 4s 26ms/step - loss: 
0.3863 - accuracy: 0.8204 - val_loss: 0.3980 - val_accuracy: 0.8064 Epoch 180/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3824 - accuracy: 0.8237 - val_loss: 0.4081 - val_accuracy: 0.7991 Epoch 181/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3861 - accuracy: 0.8189 - val_loss: 0.4207 - val_accuracy: 0.7971 Epoch 182/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3885 - accuracy: 0.8221 - val_loss: 0.4134 - val_accuracy: 0.8072 Epoch 183/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3844 - accuracy: 0.8211 - val_loss: 0.4032 - val_accuracy: 0.8060 Epoch 184/250 162/162 [==============================] - 4s 28ms/step - loss: 0.3841 - accuracy: 0.8255 - val_loss: 0.3982 - val_accuracy: 0.8064 Epoch 185/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3817 - accuracy: 0.8222 - val_loss: 0.4164 - val_accuracy: 0.7960 Epoch 186/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3854 - accuracy: 0.8213 - val_loss: 0.3954 - val_accuracy: 0.8076 Epoch 187/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3826 - accuracy: 0.8232 - val_loss: 0.4020 - val_accuracy: 0.8041 Epoch 188/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3841 - accuracy: 0.8205 - val_loss: 0.3970 - val_accuracy: 0.8053 Epoch 189/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3836 - accuracy: 0.8223 - val_loss: 0.4042 - val_accuracy: 0.8033 Epoch 190/250 162/162 [==============================] - 4s 27ms/step - loss: 0.3861 - accuracy: 0.8182 - val_loss: 0.3988 - val_accuracy: 0.8080 Epoch 191/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3811 - accuracy: 0.8209 - val_loss: 0.3974 - val_accuracy: 0.8041 Epoch 192/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3797 - accuracy: 0.8238 - val_loss: 0.4118 - val_accuracy: 0.8095 Epoch 193/250 162/162 
[==============================] - 4s 28ms/step - loss: 0.3835 - accuracy: 0.8222 - val_loss: 0.4008 - val_accuracy: 0.8091 Epoch 194/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3851 - accuracy: 0.8218 - val_loss: 0.4025 - val_accuracy: 0.8080 Epoch 195/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3845 - accuracy: 0.8221 - val_loss: 0.4198 - val_accuracy: 0.7995 Epoch 196/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3827 - accuracy: 0.8229 - val_loss: 0.4021 - val_accuracy: 0.8076 Epoch 197/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3818 - accuracy: 0.8230 - val_loss: 0.4363 - val_accuracy: 0.7902 Epoch 198/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3839 - accuracy: 0.8211 - val_loss: 0.3978 - val_accuracy: 0.8068 Epoch 199/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3878 - accuracy: 0.8180 - val_loss: 0.3945 - val_accuracy: 0.8080 Epoch 200/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3846 - accuracy: 0.8223 - val_loss: 0.4157 - val_accuracy: 0.7967 Epoch 201/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3831 - accuracy: 0.8241 - val_loss: 0.4167 - val_accuracy: 0.7944 Epoch 202/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3867 - accuracy: 0.8209 - val_loss: 0.4100 - val_accuracy: 0.8060 Epoch 203/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3825 - accuracy: 0.8228 - val_loss: 0.3985 - val_accuracy: 0.8057 Epoch 204/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3826 - accuracy: 0.8222 - val_loss: 0.4265 - val_accuracy: 0.7855 Epoch 205/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3837 - accuracy: 0.8178 - val_loss: 0.4097 - val_accuracy: 0.8084 Epoch 206/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3831 - accuracy: 0.8217 - val_loss: 0.4020 
- val_accuracy: 0.8060 Epoch 207/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3820 - accuracy: 0.8228 - val_loss: 0.4007 - val_accuracy: 0.8064 Epoch 208/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3853 - accuracy: 0.8186 - val_loss: 0.4132 - val_accuracy: 0.7998 Epoch 209/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3882 - accuracy: 0.8192 - val_loss: 0.4197 - val_accuracy: 0.8006 Epoch 210/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3807 - accuracy: 0.8249 - val_loss: 0.3972 - val_accuracy: 0.8084 Epoch 211/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3823 - accuracy: 0.8238 - val_loss: 0.4036 - val_accuracy: 0.8037 Epoch 212/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3846 - accuracy: 0.8208 - val_loss: 0.3985 - val_accuracy: 0.8111 Epoch 213/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3799 - accuracy: 0.8239 - val_loss: 0.4134 - val_accuracy: 0.8010 Epoch 214/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3808 - accuracy: 0.8238 - val_loss: 0.4032 - val_accuracy: 0.8099 Epoch 215/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3854 - accuracy: 0.8230 - val_loss: 0.3933 - val_accuracy: 0.8107 Epoch 216/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3791 - accuracy: 0.8248 - val_loss: 0.4062 - val_accuracy: 0.8006 Epoch 217/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3800 - accuracy: 0.8265 - val_loss: 0.4002 - val_accuracy: 0.8111 Epoch 218/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3854 - accuracy: 0.8228 - val_loss: 0.3929 - val_accuracy: 0.8076 Epoch 219/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3778 - accuracy: 0.8262 - val_loss: 0.4278 - val_accuracy: 0.7948 Epoch 220/250 162/162 [==============================] - 4s 26ms/step - loss: 
0.3849 - accuracy: 0.8207 - val_loss: 0.4273 - val_accuracy: 0.7882 Epoch 221/250 162/162 [==============================] - 5s 28ms/step - loss: 0.3813 - accuracy: 0.8220 - val_loss: 0.4009 - val_accuracy: 0.8064 Epoch 222/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3822 - accuracy: 0.8258 - val_loss: 0.3988 - val_accuracy: 0.8080 Epoch 223/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3793 - accuracy: 0.8269 - val_loss: 0.3960 - val_accuracy: 0.8099 Epoch 224/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3816 - accuracy: 0.8229 - val_loss: 0.4193 - val_accuracy: 0.8010 Epoch 225/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3823 - accuracy: 0.8233 - val_loss: 0.4032 - val_accuracy: 0.8087 Epoch 226/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3817 - accuracy: 0.8251 - val_loss: 0.4094 - val_accuracy: 0.8018 Epoch 227/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3804 - accuracy: 0.8244 - val_loss: 0.3976 - val_accuracy: 0.8037 Epoch 228/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3815 - accuracy: 0.8268 - val_loss: 0.3958 - val_accuracy: 0.8095 Epoch 229/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3829 - accuracy: 0.8250 - val_loss: 0.3995 - val_accuracy: 0.8146 Epoch 230/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3785 - accuracy: 0.8255 - val_loss: 0.3983 - val_accuracy: 0.8118 Epoch 231/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3799 - accuracy: 0.8250 - val_loss: 0.3979 - val_accuracy: 0.8103 Epoch 232/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3800 - accuracy: 0.8238 - val_loss: 0.4096 - val_accuracy: 0.8022 Epoch 233/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3765 - accuracy: 0.8273 - val_loss: 0.3927 - val_accuracy: 0.8130 Epoch 234/250 162/162 
[==============================] - 4s 26ms/step - loss: 0.3806 - accuracy: 0.8273 - val_loss: 0.3957 - val_accuracy: 0.8118 Epoch 235/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3788 - accuracy: 0.8289 - val_loss: 0.4061 - val_accuracy: 0.8099 Epoch 236/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3769 - accuracy: 0.8262 - val_loss: 0.3916 - val_accuracy: 0.8138 Epoch 237/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3805 - accuracy: 0.8258 - val_loss: 0.3986 - val_accuracy: 0.8107 Epoch 238/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3771 - accuracy: 0.8287 - val_loss: 0.4242 - val_accuracy: 0.7991 Epoch 239/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3804 - accuracy: 0.8247 - val_loss: 0.3964 - val_accuracy: 0.8103 Epoch 240/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3775 - accuracy: 0.8276 - val_loss: 0.3974 - val_accuracy: 0.8118 Epoch 241/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3766 - accuracy: 0.8270 - val_loss: 0.4111 - val_accuracy: 0.8010 Epoch 242/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3782 - accuracy: 0.8274 - val_loss: 0.3921 - val_accuracy: 0.8180 Epoch 243/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3806 - accuracy: 0.8278 - val_loss: 0.3927 - val_accuracy: 0.8177 Epoch 244/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3778 - accuracy: 0.8289 - val_loss: 0.3935 - val_accuracy: 0.8146 Epoch 245/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3787 - accuracy: 0.8268 - val_loss: 0.3965 - val_accuracy: 0.8157 Epoch 246/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3780 - accuracy: 0.8288 - val_loss: 0.3958 - val_accuracy: 0.8149 Epoch 247/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3828 - accuracy: 0.8209 - val_loss: 0.3970 
- val_accuracy: 0.8068 Epoch 248/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3760 - accuracy: 0.8273 - val_loss: 0.3926 - val_accuracy: 0.8134 Epoch 249/250 162/162 [==============================] - 4s 26ms/step - loss: 0.3841 - accuracy: 0.8225 - val_loss: 0.3933 - val_accuracy: 0.8111 Epoch 250/250 162/162 [==============================] - 4s 27ms/step - loss: 0.3765 - accuracy: 0.8291 - val_loss: 0.4005 - val_accuracy: 0.8146
# Plot training curves for the model recorded in history_2_g
# (loss on the left axis, accuracy on the right).
plt.style.use('fivethirtyeight')
train_acc = history_2_g.history['accuracy']
train_loss = history_2_g.history['loss']
test_acc = history_2_g.history['val_accuracy']
test_loss = history_2_g.history['val_loss']
# Derive the x-axis from the recorded history rather than hard-coding 250,
# so the plot stays correct if the epoch count ever changes.
epochs = range(len(train_loss))
fig, ax = plt.subplots(1, 2)
fig.set_size_inches(20, 6)
ax[0].plot(epochs, train_loss, label='Training Loss')
ax[0].plot(epochs, test_loss, label='Testing Loss')
ax[0].set_title('Training & Testing Loss')
ax[0].legend()
ax[0].set_xlabel("Epochs")
ax[1].plot(epochs, train_acc, label='Training Accuracy')
ax[1].plot(epochs, test_acc, label='Testing Accuracy')
ax[1].set_title('Training & Testing Accuracy')
ax[1].legend()
ax[1].set_xlabel("Epochs")
plt.show()
# Save Model
# Persist the trained network (architecture + weights + optimizer state)
# to disk in HDF5 format so it can be reloaded without retraining.
lstm_1_g.save('Trained Models/lstm_1_g.h5')
# Prepare the feature/label splits for the dense classifier:
# reshape each 13-value feature vector to (13, 1) for the per-timestep
# Dense input, and one-hot encode the integer labels.
x_train, x_test, y_train, y_test = X_train, X_test, y2_train, y2_test
size = 13  # features per sample (presumably 13 MFCC coefficients — TODO confirm)
x_train = np.asarray(x_train).reshape(-1, size, 1)
x_test = np.asarray(x_test).reshape(-1, size, 1)
y_train = to_categorical(np.array(y_train))
y_test = to_categorical(np.array(y_test))
# Create Model
# Dense-only classifier over (13, 1) feature vectors with 7 softmax outputs.
# NOTE(review): despite the "lstm" name, this model contains no recurrent layers.
lstm_1_e = Sequential()
# Dense on a 3-D input applies per position along axis 1; the Flatten below
# collapses the (13, 32) activations into a single vector.
lstm_1_e.add(Dense(256, activation='relu', input_shape=(x_train.shape[1], 1)))
lstm_1_e.add(Dense(128, activation='relu'))
lstm_1_e.add(Dense(64, activation='relu'))
lstm_1_e.add(Dense(32, activation='relu'))
lstm_1_e.add(Flatten())
lstm_1_e.add(Dense(16, activation='relu'))
lstm_1_e.add(Dense(10, activation='relu'))
lstm_1_e.add(Dense(8, activation='relu'))
# Removed a redundant second Flatten() here: the tensor is already 2-D
# (None, 8) at this point, so the layer was a no-op.
lstm_1_e.add(Dense(7, activation='softmax'))
lstm_1_e.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
lstm_1_e.summary()
Model: "sequential_3" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_11 (Dense) (None, 13, 256) 512 _________________________________________________________________ dense_12 (Dense) (None, 13, 128) 32896 _________________________________________________________________ dense_13 (Dense) (None, 13, 64) 8256 _________________________________________________________________ dense_14 (Dense) (None, 13, 32) 2080 _________________________________________________________________ flatten_3 (Flatten) (None, 416) 0 _________________________________________________________________ dense_15 (Dense) (None, 16) 6672 _________________________________________________________________ dense_16 (Dense) (None, 10) 170 _________________________________________________________________ dense_17 (Dense) (None, 8) 88 _________________________________________________________________ flatten_4 (Flatten) (None, 8) 0 _________________________________________________________________ dense_18 (Dense) (None, 7) 63 ================================================================= Total params: 50,737 Trainable params: 50,737 Non-trainable params: 0 _________________________________________________________________
history_2_e = lstm_1_e.fit(x_train, y_train, epochs=250, batch_size=16, validation_split=0.1)
Epoch 1/250 727/727 [==============================] - 15s 21ms/step - loss: 1.7595 - accuracy: 0.2800 - val_loss: 1.5582 - val_accuracy: 0.3901 Epoch 2/250 727/727 [==============================] - 15s 20ms/step - loss: 1.5375 - accuracy: 0.3900 - val_loss: 1.4874 - val_accuracy: 0.4319 Epoch 3/250 727/727 [==============================] - 15s 20ms/step - loss: 1.4745 - accuracy: 0.4134 - val_loss: 1.4613 - val_accuracy: 0.4404 Epoch 4/250 727/727 [==============================] - 15s 20ms/step - loss: 1.4336 - accuracy: 0.4366 - val_loss: 1.4151 - val_accuracy: 0.4567 Epoch 5/250 727/727 [==============================] - 15s 20ms/step - loss: 1.4032 - accuracy: 0.4563 - val_loss: 1.3890 - val_accuracy: 0.4582 Epoch 6/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3861 - accuracy: 0.4542 - val_loss: 1.3842 - val_accuracy: 0.4582 Epoch 7/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3706 - accuracy: 0.4667 - val_loss: 1.3717 - val_accuracy: 0.4551 Epoch 8/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3593 - accuracy: 0.4699 - val_loss: 1.3458 - val_accuracy: 0.4791 Epoch 9/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3477 - accuracy: 0.4699 - val_loss: 1.3408 - val_accuracy: 0.4752 Epoch 10/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3435 - accuracy: 0.4762 - val_loss: 1.3295 - val_accuracy: 0.4837 Epoch 11/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3329 - accuracy: 0.4804 - val_loss: 1.3288 - val_accuracy: 0.4923 Epoch 12/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3271 - accuracy: 0.4796 - val_loss: 1.3439 - val_accuracy: 0.4837 Epoch 13/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3202 - accuracy: 0.4829 - val_loss: 1.3470 - val_accuracy: 0.4706 Epoch 14/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3144 - accuracy: 0.4862 - 
val_loss: 1.3113 - val_accuracy: 0.4899 Epoch 15/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3120 - accuracy: 0.4865 - val_loss: 1.3354 - val_accuracy: 0.4551 Epoch 16/250 727/727 [==============================] - 15s 20ms/step - loss: 1.3044 - accuracy: 0.4920 - val_loss: 1.3280 - val_accuracy: 0.4745 Epoch 17/250 727/727 [==============================] - 15s 21ms/step - loss: 1.3023 - accuracy: 0.4912 - val_loss: 1.3002 - val_accuracy: 0.4837 Epoch 18/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2976 - accuracy: 0.4935 - val_loss: 1.3259 - val_accuracy: 0.4876 Epoch 19/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2920 - accuracy: 0.4920 - val_loss: 1.3167 - val_accuracy: 0.4837 Epoch 20/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2902 - accuracy: 0.4952 - val_loss: 1.3234 - val_accuracy: 0.4783 Epoch 21/250 727/727 [==============================] - 14s 20ms/step - loss: 1.2860 - accuracy: 0.4900 - val_loss: 1.2824 - val_accuracy: 0.4946 Epoch 22/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2852 - accuracy: 0.4940 - val_loss: 1.2751 - val_accuracy: 0.5062 Epoch 23/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2844 - accuracy: 0.4984 - val_loss: 1.3042 - val_accuracy: 0.4876 Epoch 24/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2787 - accuracy: 0.4961 - val_loss: 1.2778 - val_accuracy: 0.5031 Epoch 25/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2749 - accuracy: 0.4982 - val_loss: 1.2815 - val_accuracy: 0.4892 Epoch 26/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2712 - accuracy: 0.5011 - val_loss: 1.3010 - val_accuracy: 0.4907 Epoch 27/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2734 - accuracy: 0.5011 - val_loss: 1.3209 - val_accuracy: 0.4814 Epoch 28/250 727/727 [==============================] - 15s 
20ms/step - loss: 1.2656 - accuracy: 0.5031 - val_loss: 1.2789 - val_accuracy: 0.5046 Epoch 29/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2617 - accuracy: 0.5013 - val_loss: 1.3818 - val_accuracy: 0.4474 Epoch 30/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2626 - accuracy: 0.5014 - val_loss: 1.2768 - val_accuracy: 0.5062 Epoch 31/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2610 - accuracy: 0.5039 - val_loss: 1.2890 - val_accuracy: 0.4837 Epoch 32/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2627 - accuracy: 0.5059 - val_loss: 1.2690 - val_accuracy: 0.5039 Epoch 33/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2628 - accuracy: 0.5055 - val_loss: 1.2770 - val_accuracy: 0.5039 Epoch 34/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2536 - accuracy: 0.5051 - val_loss: 1.2776 - val_accuracy: 0.4930 Epoch 35/250 727/727 [==============================] - 14s 20ms/step - loss: 1.2579 - accuracy: 0.5054 - val_loss: 1.2688 - val_accuracy: 0.4992 Epoch 36/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2515 - accuracy: 0.5091 - val_loss: 1.2666 - val_accuracy: 0.5046 Epoch 37/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2512 - accuracy: 0.5084 - val_loss: 1.2824 - val_accuracy: 0.4899 Epoch 38/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2478 - accuracy: 0.5085 - val_loss: 1.3862 - val_accuracy: 0.4543 Epoch 39/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2487 - accuracy: 0.5027 - val_loss: 1.2525 - val_accuracy: 0.5062 Epoch 40/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2433 - accuracy: 0.5112 - val_loss: 1.2647 - val_accuracy: 0.5077 Epoch 41/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2474 - accuracy: 0.5041 - val_loss: 1.2666 - val_accuracy: 0.5139 Epoch 42/250 
727/727 [==============================] - 15s 21ms/step - loss: 1.2361 - accuracy: 0.5136 - val_loss: 1.2922 - val_accuracy: 0.4946 Epoch 43/250 727/727 [==============================] - 15s 21ms/step - loss: 1.2352 - accuracy: 0.5141 - val_loss: 1.2703 - val_accuracy: 0.4954 Epoch 44/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2392 - accuracy: 0.5086 - val_loss: 1.2621 - val_accuracy: 0.5015 Epoch 45/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2380 - accuracy: 0.5074 - val_loss: 1.2774 - val_accuracy: 0.5101 Epoch 46/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2311 - accuracy: 0.5165 - val_loss: 1.2667 - val_accuracy: 0.5062 Epoch 47/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2274 - accuracy: 0.5150 - val_loss: 1.2976 - val_accuracy: 0.4907 Epoch 48/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2317 - accuracy: 0.5118 - val_loss: 1.2615 - val_accuracy: 0.4946 Epoch 49/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2289 - accuracy: 0.5118 - val_loss: 1.2949 - val_accuracy: 0.5039 Epoch 50/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2271 - accuracy: 0.5157 - val_loss: 1.2591 - val_accuracy: 0.5101 Epoch 51/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2223 - accuracy: 0.5148 - val_loss: 1.2471 - val_accuracy: 0.5124 Epoch 52/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2248 - accuracy: 0.5185 - val_loss: 1.2592 - val_accuracy: 0.5062 Epoch 53/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2235 - accuracy: 0.5185 - val_loss: 1.2565 - val_accuracy: 0.5093 Epoch 54/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2202 - accuracy: 0.5162 - val_loss: 1.2493 - val_accuracy: 0.5046 Epoch 55/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2203 - accuracy: 0.5222 - 
val_loss: 1.2804 - val_accuracy: 0.4977 Epoch 56/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2158 - accuracy: 0.5236 - val_loss: 1.2874 - val_accuracy: 0.5046 Epoch 57/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2231 - accuracy: 0.5178 - val_loss: 1.3122 - val_accuracy: 0.4907 Epoch 58/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2156 - accuracy: 0.5245 - val_loss: 1.2426 - val_accuracy: 0.5263 Epoch 59/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2108 - accuracy: 0.5245 - val_loss: 1.2634 - val_accuracy: 0.5046 Epoch 60/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2130 - accuracy: 0.5223 - val_loss: 1.2512 - val_accuracy: 0.5116 Epoch 61/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2106 - accuracy: 0.5231 - val_loss: 1.2433 - val_accuracy: 0.5093 Epoch 62/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2062 - accuracy: 0.5251 - val_loss: 1.2499 - val_accuracy: 0.5046 Epoch 63/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2075 - accuracy: 0.5221 - val_loss: 1.2402 - val_accuracy: 0.5054 Epoch 64/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2093 - accuracy: 0.5267 - val_loss: 1.2571 - val_accuracy: 0.5132 Epoch 65/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2013 - accuracy: 0.5271 - val_loss: 1.2359 - val_accuracy: 0.5248 Epoch 66/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2077 - accuracy: 0.5192 - val_loss: 1.2316 - val_accuracy: 0.5317 Epoch 67/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2016 - accuracy: 0.5252 - val_loss: 1.2416 - val_accuracy: 0.5163 Epoch 68/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2049 - accuracy: 0.5236 - val_loss: 1.2545 - val_accuracy: 0.5077 Epoch 69/250 727/727 [==============================] - 15s 
20ms/step - loss: 1.2031 - accuracy: 0.5253 - val_loss: 1.2876 - val_accuracy: 0.5000 Epoch 70/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2064 - accuracy: 0.5244 - val_loss: 1.2481 - val_accuracy: 0.5000 Epoch 71/250 727/727 [==============================] - 15s 20ms/step - loss: 1.2031 - accuracy: 0.5268 - val_loss: 1.2847 - val_accuracy: 0.5015 Epoch 72/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1997 - accuracy: 0.5274 - val_loss: 1.2214 - val_accuracy: 0.5248 Epoch 73/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1971 - accuracy: 0.5297 - val_loss: 1.2378 - val_accuracy: 0.5062 Epoch 74/250 727/727 [==============================] - 15s 21ms/step - loss: 1.1969 - accuracy: 0.5265 - val_loss: 1.2786 - val_accuracy: 0.4977 Epoch 75/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1947 - accuracy: 0.5281 - val_loss: 1.2371 - val_accuracy: 0.5085 Epoch 76/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1978 - accuracy: 0.5247 - val_loss: 1.2349 - val_accuracy: 0.5186 Epoch 77/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1948 - accuracy: 0.5275 - val_loss: 1.2326 - val_accuracy: 0.5232 Epoch 78/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1891 - accuracy: 0.5271 - val_loss: 1.2196 - val_accuracy: 0.5341 Epoch 79/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1933 - accuracy: 0.5281 - val_loss: 1.2548 - val_accuracy: 0.5139 Epoch 80/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1915 - accuracy: 0.5327 - val_loss: 1.2581 - val_accuracy: 0.5108 Epoch 81/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1925 - accuracy: 0.5244 - val_loss: 1.2259 - val_accuracy: 0.5286 Epoch 82/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1925 - accuracy: 0.5323 - val_loss: 1.2209 - val_accuracy: 0.5248 Epoch 83/250 
727/727 [==============================] - 15s 20ms/step - loss: 1.1910 - accuracy: 0.5304 - val_loss: 1.2266 - val_accuracy: 0.5163 Epoch 84/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1916 - accuracy: 0.5290 - val_loss: 1.2499 - val_accuracy: 0.5031 Epoch 85/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1867 - accuracy: 0.5324 - val_loss: 1.2621 - val_accuracy: 0.5124 Epoch 86/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1895 - accuracy: 0.5338 - val_loss: 1.2300 - val_accuracy: 0.5209 Epoch 87/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1884 - accuracy: 0.5330 - val_loss: 1.2287 - val_accuracy: 0.5224 Epoch 88/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1828 - accuracy: 0.5330 - val_loss: 1.2186 - val_accuracy: 0.5139 Epoch 89/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1848 - accuracy: 0.5308 - val_loss: 1.2289 - val_accuracy: 0.5286 Epoch 90/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1872 - accuracy: 0.5333 - val_loss: 1.2189 - val_accuracy: 0.5294 Epoch 91/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1802 - accuracy: 0.5364 - val_loss: 1.2472 - val_accuracy: 0.5193 Epoch 92/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1794 - accuracy: 0.5380 - val_loss: 1.2142 - val_accuracy: 0.5193 Epoch 93/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1829 - accuracy: 0.5321 - val_loss: 1.2246 - val_accuracy: 0.5410 Epoch 94/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1834 - accuracy: 0.5338 - val_loss: 1.2300 - val_accuracy: 0.5186 Epoch 95/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1803 - accuracy: 0.5344 - val_loss: 1.2656 - val_accuracy: 0.5062 Epoch 96/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1806 - accuracy: 0.5327 - 
val_loss: 1.2302 - val_accuracy: 0.5070 Epoch 97/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1797 - accuracy: 0.5349 - val_loss: 1.2364 - val_accuracy: 0.5108 Epoch 98/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1773 - accuracy: 0.5357 - val_loss: 1.2248 - val_accuracy: 0.5341 Epoch 99/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1744 - accuracy: 0.5344 - val_loss: 1.2123 - val_accuracy: 0.5170 Epoch 100/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1779 - accuracy: 0.5323 - val_loss: 1.2161 - val_accuracy: 0.5294 Epoch 101/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1804 - accuracy: 0.5348 - val_loss: 1.2228 - val_accuracy: 0.5356 Epoch 102/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1781 - accuracy: 0.5371 - val_loss: 1.2116 - val_accuracy: 0.5356 Epoch 103/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1730 - accuracy: 0.5378 - val_loss: 1.2328 - val_accuracy: 0.5178 Epoch 104/250 727/727 [==============================] - 15s 21ms/step - loss: 1.1768 - accuracy: 0.5297 - val_loss: 1.2065 - val_accuracy: 0.5248 Epoch 105/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1725 - accuracy: 0.5356 - val_loss: 1.2052 - val_accuracy: 0.5379 Epoch 106/250 727/727 [==============================] - 15s 21ms/step - loss: 1.1716 - accuracy: 0.5411 - val_loss: 1.2218 - val_accuracy: 0.5271 Epoch 107/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1750 - accuracy: 0.5392 - val_loss: 1.2093 - val_accuracy: 0.5193 Epoch 108/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1728 - accuracy: 0.5379 - val_loss: 1.2121 - val_accuracy: 0.5387 Epoch 109/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1745 - accuracy: 0.5379 - val_loss: 1.2394 - val_accuracy: 0.5279 Epoch 110/250 727/727 
[==============================] - 15s 20ms/step - loss: 1.1704 - accuracy: 0.5407 - val_loss: 1.2106 - val_accuracy: 0.5395 Epoch 111/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1699 - accuracy: 0.5396 - val_loss: 1.2024 - val_accuracy: 0.5356 Epoch 112/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1679 - accuracy: 0.5416 - val_loss: 1.2287 - val_accuracy: 0.5302 Epoch 113/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1744 - accuracy: 0.5380 - val_loss: 1.2775 - val_accuracy: 0.5178 Epoch 114/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1713 - accuracy: 0.5392 - val_loss: 1.2229 - val_accuracy: 0.5302 Epoch 115/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1711 - accuracy: 0.5390 - val_loss: 1.2470 - val_accuracy: 0.5224 Epoch 116/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1668 - accuracy: 0.5404 - val_loss: 1.2181 - val_accuracy: 0.5224 Epoch 117/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1720 - accuracy: 0.5404 - val_loss: 1.2085 - val_accuracy: 0.5387 Epoch 118/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1678 - accuracy: 0.5389 - val_loss: 1.2232 - val_accuracy: 0.5224 Epoch 119/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1658 - accuracy: 0.5400 - val_loss: 1.2409 - val_accuracy: 0.5209 Epoch 120/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1681 - accuracy: 0.5379 - val_loss: 1.2013 - val_accuracy: 0.5526 Epoch 121/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1665 - accuracy: 0.5443 - val_loss: 1.2249 - val_accuracy: 0.5255 Epoch 122/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1642 - accuracy: 0.5420 - val_loss: 1.2127 - val_accuracy: 0.5356 Epoch 123/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1627 - accuracy: 0.5447 - 
val_loss: 1.2069 - val_accuracy: 0.5441 Epoch 124/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1634 - accuracy: 0.5417 - val_loss: 1.2195 - val_accuracy: 0.5193 Epoch 125/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1627 - accuracy: 0.5436 - val_loss: 1.2088 - val_accuracy: 0.5433 Epoch 126/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1612 - accuracy: 0.5445 - val_loss: 1.2112 - val_accuracy: 0.5410 Epoch 127/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1653 - accuracy: 0.5421 - val_loss: 1.2064 - val_accuracy: 0.5433 Epoch 128/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1568 - accuracy: 0.5481 - val_loss: 1.2061 - val_accuracy: 0.5286 Epoch 129/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1620 - accuracy: 0.5418 - val_loss: 1.2216 - val_accuracy: 0.5271 Epoch 130/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1617 - accuracy: 0.5389 - val_loss: 1.2075 - val_accuracy: 0.5426 Epoch 131/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1593 - accuracy: 0.5451 - val_loss: 1.2218 - val_accuracy: 0.5263 Epoch 132/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1587 - accuracy: 0.5390 - val_loss: 1.2385 - val_accuracy: 0.5263 Epoch 133/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1582 - accuracy: 0.5414 - val_loss: 1.2226 - val_accuracy: 0.4961 Epoch 134/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1593 - accuracy: 0.5449 - val_loss: 1.2121 - val_accuracy: 0.5294 Epoch 135/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1552 - accuracy: 0.5438 - val_loss: 1.2397 - val_accuracy: 0.5441 Epoch 136/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1598 - accuracy: 0.5424 - val_loss: 1.2246 - val_accuracy: 0.5410 Epoch 137/250 727/727 
[==============================] - 14s 20ms/step - loss: 1.1561 - accuracy: 0.5419 - val_loss: 1.2240 - val_accuracy: 0.5286 Epoch 138/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1563 - accuracy: 0.5437 - val_loss: 1.2345 - val_accuracy: 0.5224 Epoch 139/250 727/727 [==============================] - 16s 22ms/step - loss: 1.1575 - accuracy: 0.5421 - val_loss: 1.2237 - val_accuracy: 0.5248 Epoch 140/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1541 - accuracy: 0.5452 - val_loss: 1.2345 - val_accuracy: 0.5341 Epoch 141/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1556 - accuracy: 0.5437 - val_loss: 1.2050 - val_accuracy: 0.5348 Epoch 142/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1516 - accuracy: 0.5435 - val_loss: 1.2448 - val_accuracy: 0.5286 Epoch 143/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1548 - accuracy: 0.5392 - val_loss: 1.2154 - val_accuracy: 0.5472 Epoch 144/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1550 - accuracy: 0.5472 - val_loss: 1.2646 - val_accuracy: 0.5070 Epoch 145/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1572 - accuracy: 0.5443 - val_loss: 1.2339 - val_accuracy: 0.5132 Epoch 146/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1543 - accuracy: 0.5459 - val_loss: 1.2382 - val_accuracy: 0.5310 Epoch 147/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1527 - accuracy: 0.5454 - val_loss: 1.2137 - val_accuracy: 0.5193 Epoch 148/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1499 - accuracy: 0.5469 - val_loss: 1.2334 - val_accuracy: 0.5294 Epoch 149/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1535 - accuracy: 0.5416 - val_loss: 1.2084 - val_accuracy: 0.5387 Epoch 150/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1514 - accuracy: 0.5438 - 
val_loss: 1.2190 - val_accuracy: 0.5271 Epoch 151/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1504 - accuracy: 0.5478 - val_loss: 1.2908 - val_accuracy: 0.5108 Epoch 152/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1574 - accuracy: 0.5419 - val_loss: 1.2162 - val_accuracy: 0.5294 Epoch 153/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1500 - accuracy: 0.5489 - val_loss: 1.2036 - val_accuracy: 0.5348 Epoch 154/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1502 - accuracy: 0.5486 - val_loss: 1.2018 - val_accuracy: 0.5387 Epoch 155/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1475 - accuracy: 0.5487 - val_loss: 1.3168 - val_accuracy: 0.4930 Epoch 156/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1523 - accuracy: 0.5471 - val_loss: 1.2090 - val_accuracy: 0.5271 Epoch 157/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1495 - accuracy: 0.5511 - val_loss: 1.2442 - val_accuracy: 0.5201 Epoch 158/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1517 - accuracy: 0.5461 - val_loss: 1.2190 - val_accuracy: 0.5248 Epoch 159/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1520 - accuracy: 0.5430 - val_loss: 1.2196 - val_accuracy: 0.5333 Epoch 160/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1441 - accuracy: 0.5500 - val_loss: 1.2220 - val_accuracy: 0.5178 Epoch 161/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1483 - accuracy: 0.5493 - val_loss: 1.2205 - val_accuracy: 0.5379 Epoch 162/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1453 - accuracy: 0.5482 - val_loss: 1.2472 - val_accuracy: 0.5255 Epoch 163/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1448 - accuracy: 0.5498 - val_loss: 1.2193 - val_accuracy: 0.5402 Epoch 164/250 727/727 
[==============================] - 15s 20ms/step - loss: 1.1521 - accuracy: 0.5482 - val_loss: 1.2399 - val_accuracy: 0.5101 Epoch 165/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1453 - accuracy: 0.5496 - val_loss: 1.2211 - val_accuracy: 0.5333 Epoch 166/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1477 - accuracy: 0.5476 - val_loss: 1.2201 - val_accuracy: 0.5286 Epoch 167/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1442 - accuracy: 0.5504 - val_loss: 1.2386 - val_accuracy: 0.5132 Epoch 168/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1463 - accuracy: 0.5489 - val_loss: 1.2173 - val_accuracy: 0.5395 Epoch 169/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1443 - accuracy: 0.5447 - val_loss: 1.2316 - val_accuracy: 0.5255 Epoch 170/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1485 - accuracy: 0.5486 - val_loss: 1.2120 - val_accuracy: 0.5372 Epoch 171/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1428 - accuracy: 0.5499 - val_loss: 1.2321 - val_accuracy: 0.5263 Epoch 172/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1424 - accuracy: 0.5471 - val_loss: 1.2187 - val_accuracy: 0.5356 Epoch 173/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1384 - accuracy: 0.5504 - val_loss: 1.2156 - val_accuracy: 0.5325 Epoch 174/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1437 - accuracy: 0.5508 - val_loss: 1.2725 - val_accuracy: 0.5155 Epoch 175/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1480 - accuracy: 0.5493 - val_loss: 1.2179 - val_accuracy: 0.5240 Epoch 176/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1403 - accuracy: 0.5535 - val_loss: 1.2240 - val_accuracy: 0.5232 Epoch 177/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1440 - accuracy: 0.5509 - 
val_loss: 1.2189 - val_accuracy: 0.5271 Epoch 178/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1386 - accuracy: 0.5518 - val_loss: 1.2515 - val_accuracy: 0.5163 Epoch 179/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1359 - accuracy: 0.5543 - val_loss: 1.2071 - val_accuracy: 0.5217 Epoch 180/250 727/727 [==============================] - 15s 21ms/step - loss: 1.1380 - accuracy: 0.5526 - val_loss: 1.2599 - val_accuracy: 0.5132 Epoch 181/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1412 - accuracy: 0.5520 - val_loss: 1.2036 - val_accuracy: 0.5418 Epoch 182/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1421 - accuracy: 0.5524 - val_loss: 1.2452 - val_accuracy: 0.5193 Epoch 183/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1412 - accuracy: 0.5486 - val_loss: 1.2148 - val_accuracy: 0.5480 Epoch 184/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1400 - accuracy: 0.5526 - val_loss: 1.2093 - val_accuracy: 0.5395 Epoch 185/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1415 - accuracy: 0.5513 - val_loss: 1.2193 - val_accuracy: 0.5294 Epoch 186/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1386 - accuracy: 0.5523 - val_loss: 1.2152 - val_accuracy: 0.5170 Epoch 187/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1362 - accuracy: 0.5595 - val_loss: 1.2962 - val_accuracy: 0.5031 Epoch 188/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1372 - accuracy: 0.5520 - val_loss: 1.2514 - val_accuracy: 0.5170 Epoch 189/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1359 - accuracy: 0.5539 - val_loss: 1.2082 - val_accuracy: 0.5302 Epoch 190/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1332 - accuracy: 0.5557 - val_loss: 1.2051 - val_accuracy: 0.5410 Epoch 191/250 727/727 
[==============================] - 14s 20ms/step - loss: 1.1393 - accuracy: 0.5508 - val_loss: 1.2053 - val_accuracy: 0.5426 Epoch 192/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1362 - accuracy: 0.5542 - val_loss: 1.2161 - val_accuracy: 0.5503 Epoch 193/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1377 - accuracy: 0.5489 - val_loss: 1.2533 - val_accuracy: 0.5054 Epoch 194/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1381 - accuracy: 0.5523 - val_loss: 1.2495 - val_accuracy: 0.5085 Epoch 195/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1326 - accuracy: 0.5558 - val_loss: 1.2216 - val_accuracy: 0.5294 Epoch 196/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1374 - accuracy: 0.5540 - val_loss: 1.2161 - val_accuracy: 0.5255 Epoch 197/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1311 - accuracy: 0.5547 - val_loss: 1.2249 - val_accuracy: 0.5232 Epoch 198/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1327 - accuracy: 0.5547 - val_loss: 1.2556 - val_accuracy: 0.5209 Epoch 199/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1340 - accuracy: 0.5552 - val_loss: 1.2166 - val_accuracy: 0.5155 Epoch 200/250 727/727 [==============================] - 15s 21ms/step - loss: 1.1386 - accuracy: 0.5547 - val_loss: 1.2131 - val_accuracy: 0.5163 Epoch 201/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1336 - accuracy: 0.5559 - val_loss: 1.2108 - val_accuracy: 0.5333 Epoch 202/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1303 - accuracy: 0.5561 - val_loss: 1.2122 - val_accuracy: 0.5279 Epoch 203/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1341 - accuracy: 0.5547 - val_loss: 1.2028 - val_accuracy: 0.5325 Epoch 204/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1307 - accuracy: 0.5574 - 
val_loss: 1.2200 - val_accuracy: 0.5310 Epoch 205/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1326 - accuracy: 0.5582 - val_loss: 1.2190 - val_accuracy: 0.5372 Epoch 206/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1346 - accuracy: 0.5561 - val_loss: 1.2343 - val_accuracy: 0.5348 Epoch 207/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1321 - accuracy: 0.5548 - val_loss: 1.2212 - val_accuracy: 0.5356 Epoch 208/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1381 - accuracy: 0.5548 - val_loss: 1.2033 - val_accuracy: 0.5279 Epoch 209/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1338 - accuracy: 0.5524 - val_loss: 1.2123 - val_accuracy: 0.5232 Epoch 210/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1354 - accuracy: 0.5545 - val_loss: 1.2043 - val_accuracy: 0.5325 Epoch 211/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1332 - accuracy: 0.5507 - val_loss: 1.2262 - val_accuracy: 0.5186 Epoch 212/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1316 - accuracy: 0.5563 - val_loss: 1.2086 - val_accuracy: 0.5224 Epoch 213/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1306 - accuracy: 0.5563 - val_loss: 1.2030 - val_accuracy: 0.5348 Epoch 214/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1351 - accuracy: 0.5538 - val_loss: 1.2123 - val_accuracy: 0.5294 Epoch 215/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1291 - accuracy: 0.5541 - val_loss: 1.2170 - val_accuracy: 0.5294 Epoch 216/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1268 - accuracy: 0.5590 - val_loss: 1.2104 - val_accuracy: 0.5271 Epoch 217/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1339 - accuracy: 0.5535 - val_loss: 1.1993 - val_accuracy: 0.5294 Epoch 218/250 727/727 
[==============================] - 15s 20ms/step - loss: 1.1311 - accuracy: 0.5573 - val_loss: 1.2121 - val_accuracy: 0.5271 Epoch 219/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1251 - accuracy: 0.5544 - val_loss: 1.2256 - val_accuracy: 0.5255 Epoch 220/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1312 - accuracy: 0.5547 - val_loss: 1.2118 - val_accuracy: 0.5387 Epoch 221/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1267 - accuracy: 0.5585 - val_loss: 1.2262 - val_accuracy: 0.5333 Epoch 222/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1307 - accuracy: 0.5573 - val_loss: 1.2224 - val_accuracy: 0.5348 Epoch 223/250 727/727 [==============================] - 14s 20ms/step - loss: 1.1294 - accuracy: 0.5552 - val_loss: 1.2062 - val_accuracy: 0.5387 Epoch 224/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1266 - accuracy: 0.5557 - val_loss: 1.2408 - val_accuracy: 0.5217 Epoch 225/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1322 - accuracy: 0.5548 - val_loss: 1.2251 - val_accuracy: 0.5209 Epoch 226/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1299 - accuracy: 0.5574 - val_loss: 1.2057 - val_accuracy: 0.5255 Epoch 227/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1294 - accuracy: 0.5574 - val_loss: 1.2090 - val_accuracy: 0.5279 Epoch 228/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1271 - accuracy: 0.5547 - val_loss: 1.2445 - val_accuracy: 0.5116 Epoch 229/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1298 - accuracy: 0.5577 - val_loss: 1.2145 - val_accuracy: 0.5402 Epoch 230/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1263 - accuracy: 0.5618 - val_loss: 1.1936 - val_accuracy: 0.5379 Epoch 231/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1260 - accuracy: 0.5578 - 
val_loss: 1.2305 - val_accuracy: 0.5201 Epoch 232/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1260 - accuracy: 0.5585 - val_loss: 1.2152 - val_accuracy: 0.5163 Epoch 233/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1276 - accuracy: 0.5589 - val_loss: 1.2182 - val_accuracy: 0.5302 Epoch 234/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1282 - accuracy: 0.5557 - val_loss: 1.2155 - val_accuracy: 0.5379 Epoch 235/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1244 - accuracy: 0.5593 - val_loss: 1.2006 - val_accuracy: 0.5325 Epoch 236/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1261 - accuracy: 0.5586 - val_loss: 1.2081 - val_accuracy: 0.5255 Epoch 237/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1251 - accuracy: 0.5581 - val_loss: 1.2025 - val_accuracy: 0.5441 Epoch 238/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1342 - accuracy: 0.5550 - val_loss: 1.2158 - val_accuracy: 0.5232 Epoch 239/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1280 - accuracy: 0.5537 - val_loss: 1.2302 - val_accuracy: 0.5248 Epoch 240/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1215 - accuracy: 0.5615 - val_loss: 1.2061 - val_accuracy: 0.5310 Epoch 241/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1220 - accuracy: 0.5583 - val_loss: 1.1986 - val_accuracy: 0.5433 Epoch 242/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1231 - accuracy: 0.5582 - val_loss: 1.2106 - val_accuracy: 0.5418 Epoch 243/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1277 - accuracy: 0.5561 - val_loss: 1.2089 - val_accuracy: 0.5325 Epoch 244/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1227 - accuracy: 0.5607 - val_loss: 1.2190 - val_accuracy: 0.5279 Epoch 245/250 727/727 
[==============================] - 15s 21ms/step - loss: 1.1239 - accuracy: 0.5579 - val_loss: 1.2027 - val_accuracy: 0.5325 Epoch 246/250 727/727 [==============================] - 15s 21ms/step - loss: 1.1215 - accuracy: 0.5628 - val_loss: 1.2047 - val_accuracy: 0.5317 Epoch 247/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1261 - accuracy: 0.5576 - val_loss: 1.3110 - val_accuracy: 0.5046 Epoch 248/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1193 - accuracy: 0.5622 - val_loss: 1.2234 - val_accuracy: 0.5232 Epoch 249/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1255 - accuracy: 0.5588 - val_loss: 1.2266 - val_accuracy: 0.5224 Epoch 250/250 727/727 [==============================] - 15s 20ms/step - loss: 1.1218 - accuracy: 0.5614 - val_loss: 1.2101 - val_accuracy: 0.5410
# Plot the LSTM (Emotion) training history: loss and accuracy,
# training vs. validation, side by side.
plt.style.use('fivethirtyeight')

# Per-epoch metrics recorded by model.fit() in the History object.
train_acc = history_2_e.history['accuracy']
train_loss = history_2_e.history['loss']
test_acc = history_2_e.history['val_accuracy']
test_loss = history_2_e.history['val_loss']

# Derive the x-axis from the recorded history instead of hard-coding 250,
# so the plot stays correct if the epoch count changes or training stops early.
epochs = range(len(train_loss))

fig, ax = plt.subplots(1, 2)
fig.set_size_inches(20, 6)

ax[0].plot(epochs, train_loss, label='Training Loss')
ax[0].plot(epochs, test_loss, label='Testing Loss')
ax[0].set_title('Training & Testing Loss')
ax[0].legend()
ax[0].set_xlabel("Epochs")

ax[1].plot(epochs, train_acc, label='Training Accuracy')
ax[1].plot(epochs, test_acc, label='Testing Accuracy')
ax[1].set_title('Training & Testing Accuracy')
ax[1].legend()
ax[1].set_xlabel("Epochs")
plt.show()
# Save Model
# NOTE(review): the object saved is `lstm_1_g` (apparently the *gender* LSTM),
# but the filename is 'lstm_1_e.h5' (emotion) and the plots above use
# `history_2_e` — this looks like a copy-paste slip; confirm the intended
# emotion-LSTM model variable is the one being saved here.
lstm_1_g.save('Trained Models/lstm_1_e.h5')
# Print a summary table of each classifier's final training accuracy,
# taken from the last epoch of its Keras History.
print("\nPerformance with Models")
print("=======================\n")

summary_table = PrettyTable()
summary_table.add_column(
    "Classifier Name",
    ["CNN (Gender)", "CNN (Emotion)", "LSTM (Gender)", "LSTM (Emotion)"],
)
summary_table.add_column(
    "Accuracy Score",
    [
        history_1_g.history['accuracy'][-1],
        history_1_e.history['accuracy'][-1],
        history_2_g.history['accuracy'][-1],
        history_2_e.history['accuracy'][-1],
    ],
)
print(summary_table)
# Horizontal bar chart comparing the classifiers' final training accuracies.
# Renamed x/y -> labels/scores: `x` previously shadowed the PrettyTable
# instance created just above, which was confusing.
labels = ["CNN (Gender)", "CNN (Emotion)", "LSTM (Gender)", "LSTM (Emotion)"]
scores = [
    round(history_1_g.history['accuracy'][-1], 2),
    round(history_1_e.history['accuracy'][-1], 2),
    round(history_2_g.history['accuracy'][-1], 2),
    round(history_2_e.history['accuracy'][-1], 2),
]

plt.rcParams["figure.figsize"] = (14, 5)
fig, ax = plt.subplots()
width = 0.25  # bar thickness
ind = np.arange(len(scores))
ax.barh(ind, scores, width, color="blue")
ax.set_yticks((ind + width / 2) - 0.1)
ax.set_yticklabels(labels, minor=False)

# Annotate each bar with its value just past the bar's end.
# (Fixed: this loop body had lost its indentation, which is a syntax error.)
for i, v in enumerate(scores):
    ax.text(v + 0.01, i, str(v), color='blue', fontweight='bold')

plt.title('Comparison')  # fixed typo: was 'Comparision'
plt.xlabel('Accuracy Score')
plt.ylabel('Classifiers')
plt.show()
Performance with Models ======================= +-----------------+--------------------+ | Classifier Name | Accuracy Score | +-----------------+--------------------+ | CNN (Gender) | 0.9777777791023254 | | CNN (Emotion) | 0.7268292903900146 | | LSTM (Gender) | 0.8290747404098511 | | LSTM (Emotion) | 0.5613868832588196 | +-----------------+--------------------+